From a8e5bde2bc0297613febd9bd1b78c4a6b2f1ba35 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 22 Jul 2024 09:18:10 +0400 Subject: [PATCH 01/52] chore: Make eth_tx_manager logs less verbose (#2449) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Makes eth_tx_manager emit less logs ## Why ❔ Even if there is no activity (e.g. locally), `eth_tx_manager` keeps emitting a lot of logs, which makes it harder to understand what's going on with the server. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- core/node/eth_sender/src/eth_tx_manager.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/core/node/eth_sender/src/eth_tx_manager.rs b/core/node/eth_sender/src/eth_tx_manager.rs index feac9311a727..d2ee4380d68b 100644 --- a/core/node/eth_sender/src/eth_tx_manager.rs +++ b/core/node/eth_sender/src/eth_tx_manager.rs @@ -586,10 +586,14 @@ impl EthTxManager { .await .unwrap(); - tracing::info!( - "Sending {} {operator_type:?} new transactions", - new_eth_tx.len() - ); + if !new_eth_tx.is_empty() { + tracing::info!( + "Sending {} {operator_type:?} new transactions", + new_eth_tx.len() + ); + } else { + tracing::trace!("No new transactions to send"); + } for tx in new_eth_tx { let result = self.send_eth_tx(storage, &tx, 0, current_block).await; // If one of the transactions doesn't succeed, this means we should return @@ -632,7 +636,7 @@ impl EthTxManager { storage: &mut Connection<'_, Core>, l1_block_numbers: L1BlockNumbers, ) { - tracing::info!("Loop iteration at block {}", l1_block_numbers.latest); + tracing::trace!("Loop iteration at block {}", l1_block_numbers.latest); // We can treat those two operators independently as they have different nonces and // 
aggregator makes sure that corresponding Commit transaction is confirmed before creating // a PublishProof transaction From 4977818bcb4994549e82e61925510a1c4114ea7e Mon Sep 17 00:00:00 2001 From: Daniyar Itegulov Date: Mon, 22 Jul 2024 15:49:15 +1000 Subject: [PATCH 02/52] chore: fix cargo deny check (#2450) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ ## Why ❔ ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- Cargo.lock | 8 ++++---- deny.toml | 6 ++---- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f73206e46e06..716edb33c877 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4105,9 +4105,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.57" +version = "0.10.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ "bitflags 2.6.0", "cfg-if 1.0.0", @@ -4137,9 +4137,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.93" +version = "0.9.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" dependencies = [ "cc", "libc", diff --git a/deny.toml b/deny.toml index 59265ec085be..1e4a30ad6231 100644 --- a/deny.toml +++ b/deny.toml @@ -6,9 +6,7 @@ vulnerability = "deny" unmaintained = "warn" yanked = "warn" notice = "warn" -ignore = [ - 
"RUSTSEC-2023-0018", -] +ignore = [] [licenses] unlicensed = "deny" @@ -30,7 +28,7 @@ allow-osi-fsf-free = "neither" default = "deny" confidence-threshold = 0.8 exceptions = [ - { name = "ring", allow = ["OpenSSL"] }, + { name = "ring", allow = ["OpenSSL"] }, ] unused-allowed-license = "allow" From 62c3326d6286faf79f1a28ee584d8ea8d5c2fadc Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 22 Jul 2024 11:40:54 +0400 Subject: [PATCH 03/52] refactor(prover_fri_gateway): Refactor gateway structures (#2451) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Improves readability of prover fri gateway: - `PeriodicApi` is moved to a separate file. Generic argument was removed; instead now it has associated types for both request and response. - `PeriodicApiStruct` was renamed to `ProverApiClient`. `PeriodicApiStruct::run` was moved to `PeriodicApi` trait. - Dedicated types were created for `ProofSubmitter` and `ProofGenDataFetcher`. - A bit of doc comments. It can be refactored further, but I want to focus on incremental improvements for now. ## Why ❔ - Previously the workflow responsibilities were split between `PeriodicApi` and `PeriodicApiStruct`. Now each type has its own area of responsiblity. - Using the same type for two different pollers was pretty confusing. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- .../src/api_data_fetcher.rs | 109 ------------------ prover/prover_fri_gateway/src/client.rs | 51 ++++++++ prover/prover_fri_gateway/src/main.rs | 38 +++--- .../src/proof_gen_data_fetcher.rs | 38 ++++-- .../prover_fri_gateway/src/proof_submitter.rs | 41 +++++-- prover/prover_fri_gateway/src/traits.rs | 62 ++++++++++ 6 files changed, 194 insertions(+), 145 deletions(-) delete mode 100644 prover/prover_fri_gateway/src/api_data_fetcher.rs create mode 100644 prover/prover_fri_gateway/src/client.rs create mode 100644 prover/prover_fri_gateway/src/traits.rs diff --git a/prover/prover_fri_gateway/src/api_data_fetcher.rs b/prover/prover_fri_gateway/src/api_data_fetcher.rs deleted file mode 100644 index f2492588c739..000000000000 --- a/prover/prover_fri_gateway/src/api_data_fetcher.rs +++ /dev/null @@ -1,109 +0,0 @@ -use std::{sync::Arc, time::Duration}; - -use async_trait::async_trait; -use reqwest::Client; -use serde::{de::DeserializeOwned, Serialize}; -use tokio::{sync::watch, time::sleep}; -use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover}; - -use crate::metrics::METRICS; - -/// The path to the API endpoint that returns the next proof generation data. -pub(crate) const PROOF_GENERATION_DATA_PATH: &str = "/proof_generation_data"; - -/// The path to the API endpoint that submits the proof. -pub(crate) const SUBMIT_PROOF_PATH: &str = "/submit_proof"; - -pub(crate) struct PeriodicApiStruct { - pub(crate) blob_store: Arc, - pub(crate) pool: ConnectionPool, - pub(crate) api_url: String, - pub(crate) poll_duration: Duration, - pub(crate) client: Client, -} - -impl PeriodicApiStruct { - pub(crate) async fn send_http_request( - &self, - request: Req, - endpoint: &str, - ) -> Result - where - Req: Serialize, - Resp: DeserializeOwned, - { - tracing::info!("Sending request to {}", endpoint); - - self.client - .post(endpoint) - .json(&request) - .send() - .await? - .error_for_status()? 
- .json::() - .await - } - - pub(crate) async fn run( - self, - mut stop_receiver: watch::Receiver, - ) -> anyhow::Result<()> - where - Req: Send, - Self: PeriodicApi, - { - tracing::info!( - "Starting periodic job: {} with frequency: {:?}", - Self::SERVICE_NAME, - self.poll_duration - ); - - loop { - if *stop_receiver.borrow() { - tracing::warn!("Stop signal received, shutting down {}", Self::SERVICE_NAME); - return Ok(()); - } - - if let Some((job_id, request)) = self.get_next_request().await { - match self.send_request(job_id, request).await { - Ok(response) => { - self.handle_response(job_id, response).await; - } - Err(err) => { - METRICS.http_error[&Self::SERVICE_NAME].inc(); - tracing::error!("HTTP request failed due to error: {}", err); - } - } - } - tokio::select! { - _ = stop_receiver.changed() => { - tracing::warn!("Stop signal received, shutting down {}", Self::SERVICE_NAME); - return Ok(()); - } - _ = sleep(self.poll_duration) => {} - } - } - } -} - -/// Trait for fetching data from an API periodically. -#[async_trait] -pub(crate) trait PeriodicApi: Sync + Send { - type JobId: Send + Copy; - type Response: Send; - - const SERVICE_NAME: &'static str; - - /// Returns the next request to be sent to the API and the endpoint to send it to. - async fn get_next_request(&self) -> Option<(Self::JobId, Req)>; - - /// Handles the response from the API. 
- async fn send_request( - &self, - job_id: Self::JobId, - request: Req, - ) -> reqwest::Result; - - async fn handle_response(&self, job_id: Self::JobId, response: Self::Response); -} diff --git a/prover/prover_fri_gateway/src/client.rs b/prover/prover_fri_gateway/src/client.rs new file mode 100644 index 000000000000..5f1ad79ef36f --- /dev/null +++ b/prover/prover_fri_gateway/src/client.rs @@ -0,0 +1,51 @@ +use std::sync::Arc; + +use serde::{de::DeserializeOwned, Serialize}; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover}; + +/// A tiny wrapper over the reqwest client that also stores +/// the objects commonly needed when interacting with prover API. +#[derive(Debug)] +pub(crate) struct ProverApiClient { + pub(crate) blob_store: Arc, + pub(crate) pool: ConnectionPool, + pub(crate) api_url: String, + pub(crate) client: reqwest::Client, +} + +impl ProverApiClient { + pub(crate) fn new( + blob_store: Arc, + pool: ConnectionPool, + api_url: String, + ) -> Self { + Self { + blob_store, + pool, + api_url, + client: reqwest::Client::new(), + } + } + + pub(crate) async fn send_http_request( + &self, + request: Req, + endpoint: &str, + ) -> Result + where + Req: Serialize, + Resp: DeserializeOwned, + { + tracing::info!("Sending request to {}", endpoint); + + self.client + .post(endpoint) + .json(&request) + .send() + .await? + .error_for_status()? 
+ .json::() + .await + } +} diff --git a/prover/prover_fri_gateway/src/main.rs b/prover/prover_fri_gateway/src/main.rs index caa165331116..c204fb7395f2 100644 --- a/prover/prover_fri_gateway/src/main.rs +++ b/prover/prover_fri_gateway/src/main.rs @@ -2,22 +2,22 @@ use std::time::Duration; use anyhow::Context as _; use clap::Parser; -use reqwest::Client; +use proof_gen_data_fetcher::ProofGenDataFetcher; +use proof_submitter::ProofSubmitter; use tokio::sync::{oneshot, watch}; +use traits::PeriodicApi as _; use zksync_core_leftovers::temp_config_store::{load_database_secrets, load_general_config}; use zksync_env_config::object_store::ProverObjectStoreConfig; use zksync_object_store::ObjectStoreFactory; use zksync_prover_dal::{ConnectionPool, Prover}; -use zksync_prover_interface::api::{ProofGenerationDataRequest, SubmitProofRequest}; use zksync_utils::wait_for_tasks::ManagedTasks; use zksync_vlog::prometheus::PrometheusExporterConfig; -use crate::api_data_fetcher::{PeriodicApiStruct, PROOF_GENERATION_DATA_PATH, SUBMIT_PROOF_PATH}; - -mod api_data_fetcher; +mod client; mod metrics; mod proof_gen_data_fetcher; mod proof_submitter; +mod traits; #[tokio::main] async fn main() -> anyhow::Result<()> { @@ -65,20 +65,16 @@ async fn main() -> anyhow::Result<()> { ); let store_factory = ObjectStoreFactory::new(object_store_config.0); - let proof_submitter = PeriodicApiStruct { - blob_store: store_factory.create_store().await?, - pool: pool.clone(), - api_url: format!("{}{SUBMIT_PROOF_PATH}", config.api_url), - poll_duration: config.api_poll_duration(), - client: Client::new(), - }; - let proof_gen_data_fetcher = PeriodicApiStruct { - blob_store: store_factory.create_store().await?, + let proof_submitter = ProofSubmitter::new( + store_factory.create_store().await?, + config.api_url.clone(), + pool.clone(), + ); + let proof_gen_data_fetcher = ProofGenDataFetcher::new( + store_factory.create_store().await?, + config.api_url.clone(), pool, - api_url: 
format!("{}{PROOF_GENERATION_DATA_PATH}", config.api_url), - poll_duration: config.api_poll_duration(), - client: Client::new(), - }; + ); let (stop_sender, stop_receiver) = watch::channel(false); @@ -98,10 +94,8 @@ async fn main() -> anyhow::Result<()> { PrometheusExporterConfig::pull(config.prometheus_listener_port) .run(stop_receiver.clone()), ), - tokio::spawn( - proof_gen_data_fetcher.run::(stop_receiver.clone()), - ), - tokio::spawn(proof_submitter.run::(stop_receiver)), + tokio::spawn(proof_gen_data_fetcher.run(config.api_poll_duration(), stop_receiver.clone())), + tokio::spawn(proof_submitter.run(config.api_poll_duration(), stop_receiver)), ]; let mut tasks = ManagedTasks::new(tasks); diff --git a/prover/prover_fri_gateway/src/proof_gen_data_fetcher.rs b/prover/prover_fri_gateway/src/proof_gen_data_fetcher.rs index 9dcc93a4be77..e1add827e890 100644 --- a/prover/prover_fri_gateway/src/proof_gen_data_fetcher.rs +++ b/prover/prover_fri_gateway/src/proof_gen_data_fetcher.rs @@ -1,14 +1,37 @@ +use std::sync::Arc; + use async_trait::async_trait; -use zksync_prover_dal::ProverDal; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_interface::api::{ ProofGenerationData, ProofGenerationDataRequest, ProofGenerationDataResponse, }; -use crate::api_data_fetcher::{PeriodicApi, PeriodicApiStruct}; +use crate::{client::ProverApiClient, traits::PeriodicApi}; + +/// Poller structure that will periodically check the prover API for new proof generation data. +/// Fetched data is stored to the database/object store for further processing. +#[derive(Debug)] +pub struct ProofGenDataFetcher(ProverApiClient); + +/// The path to the API endpoint that returns the next proof generation data. 
+const PROOF_GENERATION_DATA_PATH: &str = "/proof_generation_data"; + +impl ProofGenDataFetcher { + pub(crate) fn new( + blob_store: Arc, + base_url: String, + pool: ConnectionPool, + ) -> Self { + let api_url = format!("{base_url}{PROOF_GENERATION_DATA_PATH}"); + let inner = ProverApiClient::new(blob_store, pool, api_url); + Self(inner) + } +} -impl PeriodicApiStruct { +impl ProofGenDataFetcher { async fn save_proof_gen_data(&self, data: ProofGenerationData) { - let store = &*self.blob_store; + let store = &*self.0.blob_store; let merkle_paths = store .put(data.l1_batch_number, &data.witness_input_data.merkle_paths) .await @@ -17,7 +40,7 @@ impl PeriodicApiStruct { .put(data.l1_batch_number, &data.witness_input_data) .await .expect("Failed to save proof generation data to GCS"); - let mut connection = self.pool.connection().await.unwrap(); + let mut connection = self.0.pool.connection().await.unwrap(); connection .fri_protocol_versions_dal() @@ -38,8 +61,9 @@ impl PeriodicApiStruct { } #[async_trait] -impl PeriodicApi for PeriodicApiStruct { +impl PeriodicApi for ProofGenDataFetcher { type JobId = (); + type Request = ProofGenerationDataRequest; type Response = ProofGenerationDataResponse; const SERVICE_NAME: &'static str = "ProofGenDataFetcher"; @@ -53,7 +77,7 @@ impl PeriodicApi for PeriodicApiStruct { _: (), request: ProofGenerationDataRequest, ) -> reqwest::Result { - self.send_http_request(request, &self.api_url).await + self.0.send_http_request(request, &self.0.api_url).await } async fn handle_response(&self, _: (), response: Self::Response) { diff --git a/prover/prover_fri_gateway/src/proof_submitter.rs b/prover/prover_fri_gateway/src/proof_submitter.rs index 8b20ab67b516..2a74781b59dd 100644 --- a/prover/prover_fri_gateway/src/proof_submitter.rs +++ b/prover/prover_fri_gateway/src/proof_submitter.rs @@ -1,13 +1,37 @@ +use std::sync::Arc; + use async_trait::async_trait; -use zksync_prover_dal::ProverDal; +use zksync_object_store::ObjectStore; +use 
zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_interface::api::{SubmitProofRequest, SubmitProofResponse}; use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber}; -use crate::api_data_fetcher::{PeriodicApi, PeriodicApiStruct}; +use crate::{client::ProverApiClient, traits::PeriodicApi}; + +/// The path to the API endpoint that submits the proof. +const SUBMIT_PROOF_PATH: &str = "/submit_proof"; -impl PeriodicApiStruct { +/// Poller structure that will periodically check the database for new proofs to submit. +/// Once a new proof is detected, it will be sent to the prover API. +#[derive(Debug)] +pub struct ProofSubmitter(ProverApiClient); + +impl ProofSubmitter { + pub(crate) fn new( + blob_store: Arc, + base_url: String, + pool: ConnectionPool, + ) -> Self { + let api_url = format!("{base_url}{SUBMIT_PROOF_PATH}"); + let inner = ProverApiClient::new(blob_store, pool, api_url); + Self(inner) + } +} + +impl ProofSubmitter { async fn next_submit_proof_request(&self) -> Option<(L1BatchNumber, SubmitProofRequest)> { let (l1_batch_number, protocol_version, status) = self + .0 .pool .connection() .await @@ -19,6 +43,7 @@ impl PeriodicApiStruct { let request = match status { ProofCompressionJobStatus::Successful => { let proof = self + .0 .blob_store .get((l1_batch_number, protocol_version)) .await @@ -36,7 +61,8 @@ impl PeriodicApiStruct { } async fn save_successful_sent_proof(&self, l1_batch_number: L1BatchNumber) { - self.pool + self.0 + .pool .connection() .await .unwrap() @@ -47,8 +73,9 @@ impl PeriodicApiStruct { } #[async_trait] -impl PeriodicApi for PeriodicApiStruct { +impl PeriodicApi for ProofSubmitter { type JobId = L1BatchNumber; + type Request = SubmitProofRequest; type Response = SubmitProofResponse; const SERVICE_NAME: &'static str = "ProofSubmitter"; @@ -62,8 +89,8 @@ impl PeriodicApi for PeriodicApiStruct { job_id: Self::JobId, request: SubmitProofRequest, ) -> reqwest::Result { - let endpoint = 
format!("{}/{job_id}", self.api_url); - self.send_http_request(request, &endpoint).await + let endpoint = format!("{}/{job_id}", self.0.api_url); + self.0.send_http_request(request, &endpoint).await } async fn handle_response(&self, job_id: L1BatchNumber, response: Self::Response) { diff --git a/prover/prover_fri_gateway/src/traits.rs b/prover/prover_fri_gateway/src/traits.rs new file mode 100644 index 000000000000..e54ffe2414ce --- /dev/null +++ b/prover/prover_fri_gateway/src/traits.rs @@ -0,0 +1,62 @@ +use std::time::Duration; + +use tokio::sync::watch; + +use crate::metrics::METRICS; + +/// Trait for fetching data from an API periodically. +#[async_trait::async_trait] +pub(crate) trait PeriodicApi: Sync + Send + 'static + Sized { + type JobId: Send + Copy; + type Request: Send; + type Response: Send; + + const SERVICE_NAME: &'static str; + + /// Returns the next request to be sent to the API and the endpoint to send it to. + async fn get_next_request(&self) -> Option<(Self::JobId, Self::Request)>; + + /// Handles the response from the API. 
+ async fn send_request( + &self, + job_id: Self::JobId, + request: Self::Request, + ) -> reqwest::Result; + + async fn handle_response(&self, job_id: Self::JobId, response: Self::Response); + + async fn run( + self, + poll_duration: Duration, + mut stop_receiver: watch::Receiver, + ) -> anyhow::Result<()> { + tracing::info!( + "Starting periodic job: {} with frequency: {:?}", + Self::SERVICE_NAME, + poll_duration + ); + + loop { + if *stop_receiver.borrow() { + tracing::warn!("Stop signal received, shutting down {}", Self::SERVICE_NAME); + return Ok(()); + } + + if let Some((job_id, request)) = self.get_next_request().await { + match self.send_request(job_id, request).await { + Ok(response) => { + self.handle_response(job_id, response).await; + } + Err(err) => { + METRICS.http_error[&Self::SERVICE_NAME].inc(); + tracing::error!("HTTP request failed due to error: {}", err); + } + } + } + // Exit condition will be checked on the next iteration. + tokio::time::timeout(poll_duration, stop_receiver.changed()) + .await + .ok(); + } + } +} From c9da5497e2aa9d85f204ab7b74fefcfe941793ff Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 22 Jul 2024 12:40:36 +0400 Subject: [PATCH 04/52] feat(prover): Make it possible to run prover out of GCP (#2448) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ ~~When zone read domain name cannot be resolved, assumes local environment and uses `local` zone.~~ - Introduces a new config to choose cloud type, either GCP or local. - Creates `RegionFetcher` structure that can fetch the zone based on configuration. - Introduces strong typing for zone. ## Why ❔ Makes it possible to run prover locally. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- core/lib/config/src/configs/fri_prover.rs | 14 +++ core/lib/config/src/testonly.rs | 11 +++ core/lib/env_config/src/fri_prover.rs | 6 +- .../src/proto/config/prover.proto | 6 ++ core/lib/protobuf_config/src/prover.rs | 26 ++++++ prover/proof_fri_compressor/Cargo.toml | 1 + .../src/gpu_prover_availability_checker.rs | 7 +- .../src/gpu_prover_job_processor.rs | 7 +- prover/prover_fri/src/main.rs | 28 +++--- prover/prover_fri/src/socket_listener.rs | 9 +- prover/prover_fri_utils/src/region_fetcher.rs | 93 ++++++++++++++----- .../witness_vector_generator/src/generator.rs | 11 ++- prover/witness_vector_generator/src/main.rs | 17 ++-- 13 files changed, 180 insertions(+), 56 deletions(-) diff --git a/core/lib/config/src/configs/fri_prover.rs b/core/lib/config/src/configs/fri_prover.rs index 99e3d354536e..5cd25450531a 100644 --- a/core/lib/config/src/configs/fri_prover.rs +++ b/core/lib/config/src/configs/fri_prover.rs @@ -10,6 +10,18 @@ pub enum SetupLoadMode { FromMemory, } +/// Kind of cloud environment prover subsystem runs in. +/// +/// Currently will only affect how the prover zone is chosen. +#[derive(Debug, Default, Deserialize, Clone, Copy, PartialEq, Eq)] +pub enum CloudType { + /// Assumes that the prover runs in GCP. + #[default] + GCP, + /// Assumes that the prover runs locally. 
+ Local, +} + /// Configuration for the fri prover application #[derive(Debug, Deserialize, Clone, PartialEq)] pub struct FriProverConfig { @@ -28,6 +40,8 @@ pub struct FriProverConfig { pub shall_save_to_public_bucket: bool, pub prover_object_store: Option, pub public_object_store: Option, + #[serde(default)] + pub cloud_type: CloudType, } impl FriProverConfig { diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index a5e51131c3a8..e105c3282639 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -438,6 +438,16 @@ impl Distribution for EncodeDist { } } +impl Distribution for EncodeDist { + fn sample(&self, rng: &mut R) -> configs::fri_prover::CloudType { + type T = configs::fri_prover::CloudType; + match rng.gen_range(0..1) { + 0 => T::GCP, + _ => T::Local, + } + } +} + impl Distribution for EncodeDist { fn sample(&self, rng: &mut R) -> configs::FriProverConfig { configs::FriProverConfig { @@ -454,6 +464,7 @@ impl Distribution for EncodeDist { availability_check_interval_in_secs: self.sample(rng), prover_object_store: self.sample(rng), public_object_store: self.sample(rng), + cloud_type: self.sample(rng), } } } diff --git a/core/lib/env_config/src/fri_prover.rs b/core/lib/env_config/src/fri_prover.rs index 96069d6514ea..bdcf5291ee05 100644 --- a/core/lib/env_config/src/fri_prover.rs +++ b/core/lib/env_config/src/fri_prover.rs @@ -18,7 +18,10 @@ impl FromEnv for FriProverConfig { #[cfg(test)] mod tests { use zksync_config::{ - configs::{fri_prover::SetupLoadMode, object_store::ObjectStoreMode}, + configs::{ + fri_prover::{CloudType, SetupLoadMode}, + object_store::ObjectStoreMode, + }, ObjectStoreConfig, }; @@ -57,6 +60,7 @@ mod tests { local_mirror_path: None, }), availability_check_interval_in_secs: Some(1_800), + cloud_type: CloudType::GCP, } } diff --git a/core/lib/protobuf_config/src/proto/config/prover.proto b/core/lib/protobuf_config/src/proto/config/prover.proto index c50ebdde4eef..80d45f40bbcb 
100644 --- a/core/lib/protobuf_config/src/proto/config/prover.proto +++ b/core/lib/protobuf_config/src/proto/config/prover.proto @@ -21,6 +21,11 @@ enum SetupLoadMode { FROM_MEMORY = 1; } +enum CloudType { + GCP = 0; + LOCAL = 1; +} + message Prover { optional string setup_data_path = 1; // required; fs path? optional uint32 prometheus_port = 2; // required; u16 @@ -35,6 +40,7 @@ message Prover { optional bool shall_save_to_public_bucket = 13; // required optional config.object_store.ObjectStore public_object_store = 22; optional config.object_store.ObjectStore prover_object_store = 23; + optional CloudType cloud_type = 24; // optional reserved 5, 6, 9; reserved "base_layer_circuit_ids_to_be_verified", "recursive_layer_circuit_ids_to_be_verified", "witness_vector_generator_thread_count"; } diff --git a/core/lib/protobuf_config/src/prover.rs b/core/lib/protobuf_config/src/prover.rs index 50782ab8e968..e1c31ee1fccd 100644 --- a/core/lib/protobuf_config/src/prover.rs +++ b/core/lib/protobuf_config/src/prover.rs @@ -292,6 +292,24 @@ impl proto::SetupLoadMode { } } +impl proto::CloudType { + fn new(x: &configs::fri_prover::CloudType) -> Self { + use configs::fri_prover::CloudType as From; + match x { + From::GCP => Self::Gcp, + From::Local => Self::Local, + } + } + + fn parse(&self) -> configs::fri_prover::CloudType { + use configs::fri_prover::CloudType as To; + match self { + Self::Gcp => To::GCP, + Self::Local => To::Local, + } + } +} + impl ProtoRepr for proto::Prover { type Type = configs::FriProverConfig; fn read(&self) -> anyhow::Result { @@ -338,6 +356,13 @@ impl ProtoRepr for proto::Prover { .context("shall_save_to_public_bucket")?, public_object_store, prover_object_store, + cloud_type: self + .cloud_type + .map(proto::CloudType::try_from) + .transpose() + .context("cloud_type")? 
+ .map(|x| x.parse()) + .unwrap_or_default(), }) } @@ -356,6 +381,7 @@ impl ProtoRepr for proto::Prover { shall_save_to_public_bucket: Some(this.shall_save_to_public_bucket), prover_object_store: this.prover_object_store.as_ref().map(ProtoRepr::build), public_object_store: this.public_object_store.as_ref().map(ProtoRepr::build), + cloud_type: Some(proto::CloudType::new(&this.cloud_type).into()), } } } diff --git a/prover/proof_fri_compressor/Cargo.toml b/prover/proof_fri_compressor/Cargo.toml index 14fc44d5a3b2..0c01a40874f2 100644 --- a/prover/proof_fri_compressor/Cargo.toml +++ b/prover/proof_fri_compressor/Cargo.toml @@ -41,5 +41,6 @@ serde = { workspace = true, features = ["derive"] } wrapper_prover = { workspace = true, optional = true } [features] +default = [] gpu = ["wrapper_prover"] diff --git a/prover/prover_fri/src/gpu_prover_availability_checker.rs b/prover/prover_fri/src/gpu_prover_availability_checker.rs index 4b51b26e5d38..6e154ba553a9 100644 --- a/prover/prover_fri/src/gpu_prover_availability_checker.rs +++ b/prover/prover_fri/src/gpu_prover_availability_checker.rs @@ -4,6 +4,7 @@ pub mod availability_checker { use tokio::sync::Notify; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; + use zksync_prover_fri_utils::region_fetcher::Zone; use zksync_types::prover_dal::{GpuProverInstanceStatus, SocketAddress}; use crate::metrics::{KillingReason, METRICS}; @@ -12,7 +13,7 @@ pub mod availability_checker { /// If the prover instance is not found in the database or marked as dead, the availability checker will shut down the prover. 
pub struct AvailabilityChecker { address: SocketAddress, - zone: String, + zone: Zone, polling_interval: Duration, pool: ConnectionPool, } @@ -20,7 +21,7 @@ pub mod availability_checker { impl AvailabilityChecker { pub fn new( address: SocketAddress, - zone: String, + zone: Zone, polling_interval_secs: u32, pool: ConnectionPool, ) -> Self { @@ -46,7 +47,7 @@ pub mod availability_checker { .await .unwrap() .fri_gpu_prover_queue_dal() - .get_prover_instance_status(self.address.clone(), self.zone.clone()) + .get_prover_instance_status(self.address.clone(), self.zone.to_string()) .await; // If the prover instance is not found in the database or marked as dead, we should shut down the prover diff --git a/prover/prover_fri/src/gpu_prover_job_processor.rs b/prover/prover_fri/src/gpu_prover_job_processor.rs index cbd363e9b4f4..6148ca3e0aed 100644 --- a/prover/prover_fri/src/gpu_prover_job_processor.rs +++ b/prover/prover_fri/src/gpu_prover_job_processor.rs @@ -28,6 +28,7 @@ pub mod gpu_prover { }, CircuitWrapper, FriProofWrapper, ProverServiceDataKey, WitnessVectorArtifacts, }; + use zksync_prover_fri_utils::region_fetcher::Zone; use zksync_queued_job_processor::{async_trait, JobProcessor}; use zksync_types::{ basic_fri_types::CircuitIdRoundTuple, protocol_version::ProtocolSemanticVersion, @@ -64,7 +65,7 @@ pub mod gpu_prover { witness_vector_queue: SharedWitnessVectorQueue, prover_context: ProverContext, address: SocketAddress, - zone: String, + zone: Zone, protocol_version: ProtocolSemanticVersion, } @@ -79,7 +80,7 @@ pub mod gpu_prover { circuit_ids_for_round_to_be_proven: Vec, witness_vector_queue: SharedWitnessVectorQueue, address: SocketAddress, - zone: String, + zone: Zone, protocol_version: ProtocolSemanticVersion, ) -> Self { Prover { @@ -230,7 +231,7 @@ pub mod gpu_prover { .fri_gpu_prover_queue_dal() .update_prover_instance_from_full_to_available( self.address.clone(), - self.zone.clone(), + self.zone.to_string(), ) .await; } diff --git 
a/prover/prover_fri/src/main.rs b/prover/prover_fri/src/main.rs index dfab8648d74c..e4b2fd5a6709 100644 --- a/prover/prover_fri/src/main.rs +++ b/prover/prover_fri/src/main.rs @@ -16,7 +16,10 @@ use zksync_env_config::FromEnv; use zksync_object_store::{ObjectStore, ObjectStoreFactory}; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::PROVER_PROTOCOL_SEMANTIC_VERSION; -use zksync_prover_fri_utils::{get_all_circuit_id_round_tuples_for, region_fetcher::get_zone}; +use zksync_prover_fri_utils::{ + get_all_circuit_id_round_tuples_for, + region_fetcher::{RegionFetcher, Zone}, +}; use zksync_queued_job_processor::JobProcessor; use zksync_types::{ basic_fri_types::CircuitIdRoundTuple, @@ -32,24 +35,20 @@ mod prover_job_processor; mod socket_listener; mod utils; -async fn graceful_shutdown(port: u16) -> anyhow::Result> { +async fn graceful_shutdown(zone: Zone, port: u16) -> anyhow::Result> { let database_secrets = DatabaseSecrets::from_env().context("DatabaseSecrets::from_env()")?; let pool = ConnectionPool::::singleton(database_secrets.prover_url()?) .build() .await .context("failed to build a connection pool")?; let host = local_ip().context("Failed obtaining local IP address")?; - let zone_url = &FriProverConfig::from_env() - .context("FriProverConfig::from_env()")? 
- .zone_read_url; - let zone = get_zone(zone_url).await.context("get_zone()")?; let address = SocketAddress { host, port }; Ok(async move { pool.connection() .await .unwrap() .fri_gpu_prover_queue_dal() - .update_prover_instance_status(address, GpuProverInstanceStatus::Dead, zone) + .update_prover_instance_status(address, GpuProverInstanceStatus::Dead, zone.to_string()) .await }) } @@ -107,6 +106,13 @@ async fn main() -> anyhow::Result<()> { }) .context("Error setting Ctrl+C handler")?; + let zone = RegionFetcher::new( + prover_config.cloud_type, + prover_config.zone_read_url.clone(), + ) + .get_zone() + .await?; + let (stop_sender, stop_receiver) = tokio::sync::watch::channel(false); let prover_object_store_config = prover_config .prover_object_store @@ -156,6 +162,7 @@ async fn main() -> anyhow::Result<()> { let prover_tasks = get_prover_tasks( prover_config, + zone.clone(), stop_receiver.clone(), object_store_factory, public_blob_store, @@ -174,7 +181,7 @@ async fn main() -> anyhow::Result<()> { tokio::select! { _ = tasks.wait_single() => { if cfg!(feature = "gpu") { - graceful_shutdown(port) + graceful_shutdown(zone, port) .await .context("failed to prepare graceful shutdown future")? 
.await; @@ -194,6 +201,7 @@ async fn main() -> anyhow::Result<()> { #[cfg(not(feature = "gpu"))] async fn get_prover_tasks( prover_config: FriProverConfig, + _zone: Zone, stop_receiver: Receiver, store_factory: ObjectStoreFactory, public_blob_store: Option>, @@ -228,6 +236,7 @@ async fn get_prover_tasks( #[cfg(feature = "gpu")] async fn get_prover_tasks( prover_config: FriProverConfig, + zone: Zone, stop_receiver: Receiver, store_factory: ObjectStoreFactory, public_blob_store: Option>, @@ -246,9 +255,6 @@ async fn get_prover_tasks( let shared_witness_vector_queue = Arc::new(Mutex::new(witness_vector_queue)); let consumer = shared_witness_vector_queue.clone(); - let zone = get_zone(&prover_config.zone_read_url) - .await - .context("get_zone()")?; let local_ip = local_ip().context("Failed obtaining local IP address")?; let address = SocketAddress { host: local_ip, diff --git a/prover/prover_fri/src/socket_listener.rs b/prover/prover_fri/src/socket_listener.rs index 5e857e651bcf..e65471409e1e 100644 --- a/prover/prover_fri/src/socket_listener.rs +++ b/prover/prover_fri/src/socket_listener.rs @@ -11,6 +11,7 @@ pub mod gpu_socket_listener { use zksync_object_store::bincode; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::WitnessVectorArtifacts; + use zksync_prover_fri_utils::region_fetcher::Zone; use zksync_types::{ protocol_version::ProtocolSemanticVersion, prover_dal::{GpuProverInstanceStatus, SocketAddress}, @@ -26,7 +27,7 @@ pub mod gpu_socket_listener { queue: SharedWitnessVectorQueue, pool: ConnectionPool, specialized_prover_group_id: u8, - zone: String, + zone: Zone, protocol_version: ProtocolSemanticVersion, } @@ -36,7 +37,7 @@ pub mod gpu_socket_listener { queue: SharedWitnessVectorQueue, pool: ConnectionPool, specialized_prover_group_id: u8, - zone: String, + zone: Zone, protocol_version: ProtocolSemanticVersion, ) -> Self { Self { @@ -68,7 +69,7 @@ pub mod gpu_socket_listener { .insert_prover_instance( 
self.address.clone(), self.specialized_prover_group_id, - self.zone.clone(), + self.zone.to_string(), self.protocol_version, ) .await; @@ -154,7 +155,7 @@ pub mod gpu_socket_listener { .await .unwrap() .fri_gpu_prover_queue_dal() - .update_prover_instance_status(self.address.clone(), status, self.zone.clone()) + .update_prover_instance_status(self.address.clone(), status, self.zone.to_string()) .await; tracing::info!( "Marked prover as {:?} after {:?}", diff --git a/prover/prover_fri_utils/src/region_fetcher.rs b/prover/prover_fri_utils/src/region_fetcher.rs index cae211c26cbe..c73e83d531b4 100644 --- a/prover/prover_fri_utils/src/region_fetcher.rs +++ b/prover/prover_fri_utils/src/region_fetcher.rs @@ -1,51 +1,98 @@ +use core::fmt; + use anyhow::Context; use regex::Regex; use reqwest::{ header::{HeaderMap, HeaderValue}, Method, }; +use zksync_config::configs::fri_prover::CloudType; use zksync_utils::http_with_retries::send_request_with_retries; -pub async fn get_zone(zone_url: &str) -> anyhow::Result { - let data = fetch_from_url(zone_url).await.context("fetch_from_url()")?; - parse_zone(&data).context("parse_zone") +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RegionFetcher { + cloud_type: CloudType, + zone_url: String, +} + +impl RegionFetcher { + pub fn new(cloud_type: CloudType, zone_url: String) -> Self { + Self { + cloud_type, + zone_url, + } + } + + pub async fn get_zone(&self) -> anyhow::Result { + match self.cloud_type { + CloudType::GCP => GcpZoneFetcher::get_zone(&self.zone_url).await, + CloudType::Local => Ok(Zone("local".to_string())), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Zone(String); + +impl fmt::Display for Zone { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } } -async fn fetch_from_url(url: &str) -> anyhow::Result { - let mut headers = HeaderMap::new(); - headers.insert("Metadata-Flavor", HeaderValue::from_static("Google")); - let response = 
send_request_with_retries(url, 5, Method::GET, Some(headers), None).await; - response - .map_err(|err| anyhow::anyhow!("Failed fetching response from url: {url}: {err:?}"))? - .text() - .await - .context("Failed to read response as text") +impl Zone { + pub fn new(zone: T) -> Self { + Self(zone.to_string()) + } } -fn parse_zone(data: &str) -> anyhow::Result { - // Statically provided Regex should always compile. - let re = Regex::new(r"^projects/\d+/zones/(\w+-\w+-\w+)$").unwrap(); - if let Some(caps) = re.captures(data) { - let zone = &caps[1]; - return Ok(zone.to_string()); +#[derive(Debug, Clone, Copy)] +struct GcpZoneFetcher; + +impl GcpZoneFetcher { + pub async fn get_zone(zone_url: &str) -> anyhow::Result { + let data = Self::fetch_from_url(zone_url) + .await + .context("fetch_from_url()")?; + Self::parse_zone(&data).context("parse_zone") + } + + async fn fetch_from_url(url: &str) -> anyhow::Result { + let mut headers = HeaderMap::new(); + headers.insert("Metadata-Flavor", HeaderValue::from_static("Google")); + let response = send_request_with_retries(url, 5, Method::GET, Some(headers), None).await; + response + .map_err(|err| anyhow::anyhow!("Failed fetching response from url: {url}: {err:?}"))? + .text() + .await + .context("Failed to read response as text") + } + + fn parse_zone(data: &str) -> anyhow::Result { + // Statically provided Regex should always compile. 
+ let re = Regex::new(r"^projects/\d+/zones/(\w+-\w+-\w+)$").unwrap(); + if let Some(caps) = re.captures(data) { + let zone = &caps[1]; + return Ok(Zone(zone.to_string())); + } + anyhow::bail!("failed to extract zone from: {data}"); } - anyhow::bail!("failed to extract zone from: {data}"); } #[cfg(test)] mod tests { - use crate::region_fetcher::parse_zone; + use super::*; #[test] fn test_parse_zone() { let data = "projects/295056426491/zones/us-central1-a"; - let zone = parse_zone(data).unwrap(); - assert_eq!(zone, "us-central1-a"); + let zone = GcpZoneFetcher::parse_zone(data).unwrap(); + assert_eq!(zone, Zone::new("us-central1-a")); } #[test] fn test_parse_zone_panic() { let data = "invalid data"; - assert!(parse_zone(data).is_err()); + assert!(GcpZoneFetcher::parse_zone(data).is_err()); } } diff --git a/prover/witness_vector_generator/src/generator.rs b/prover/witness_vector_generator/src/generator.rs index d2b13beccd61..5574f0f1578d 100644 --- a/prover/witness_vector_generator/src/generator.rs +++ b/prover/witness_vector_generator/src/generator.rs @@ -15,7 +15,7 @@ use zksync_prover_fri_types::{ WitnessVectorArtifacts, }; use zksync_prover_fri_utils::{ - fetch_next_circuit, get_numeric_circuit_id, socket_utils::send_assembly, + fetch_next_circuit, get_numeric_circuit_id, region_fetcher::Zone, socket_utils::send_assembly, }; use zksync_queued_job_processor::JobProcessor; use zksync_types::{ @@ -30,7 +30,7 @@ pub struct WitnessVectorGenerator { object_store: Arc, pool: ConnectionPool, circuit_ids_for_round_to_be_proven: Vec, - zone: String, + zone: Zone, config: FriWitnessVectorGeneratorConfig, protocol_version: ProtocolSemanticVersion, max_attempts: u32, @@ -43,7 +43,7 @@ impl WitnessVectorGenerator { object_store: Arc, prover_connection_pool: ConnectionPool, circuit_ids_for_round_to_be_proven: Vec, - zone: String, + zone: Zone, config: FriWitnessVectorGeneratorConfig, protocol_version: ProtocolSemanticVersion, max_attempts: u32, @@ -167,7 +167,7 @@ impl 
JobProcessor for WitnessVectorGenerator { .lock_available_prover( self.config.max_prover_reservation_duration(), self.config.specialized_group_id, - self.zone.clone(), + self.zone.to_string(), self.protocol_version, ) .await; @@ -179,7 +179,8 @@ impl JobProcessor for WitnessVectorGenerator { now.elapsed() ); let result = send_assembly(job_id, &serialized, &address); - handle_send_result(&result, job_id, &address, &self.pool, self.zone.clone()).await; + handle_send_result(&result, job_id, &address, &self.pool, self.zone.to_string()) + .await; if result.is_ok() { METRICS.prover_waiting_time[&circuit_type].observe(now.elapsed()); diff --git a/prover/witness_vector_generator/src/main.rs b/prover/witness_vector_generator/src/main.rs index cb61be4227c9..58db6d6d5eb4 100644 --- a/prover/witness_vector_generator/src/main.rs +++ b/prover/witness_vector_generator/src/main.rs @@ -11,7 +11,7 @@ use zksync_env_config::object_store::ProverObjectStoreConfig; use zksync_object_store::ObjectStoreFactory; use zksync_prover_dal::ConnectionPool; use zksync_prover_fri_types::PROVER_PROTOCOL_SEMANTIC_VERSION; -use zksync_prover_fri_utils::{get_all_circuit_id_round_tuples_for, region_fetcher::get_zone}; +use zksync_prover_fri_utils::{get_all_circuit_id_round_tuples_for, region_fetcher::RegionFetcher}; use zksync_queued_job_processor::JobProcessor; use zksync_utils::wait_for_tasks::ManagedTasks; use zksync_vlog::prometheus::PrometheusExporterConfig; @@ -95,9 +95,14 @@ async fn main() -> anyhow::Result<()> { .unwrap_or_default(); let circuit_ids_for_round_to_be_proven = get_all_circuit_id_round_tuples_for(circuit_ids_for_round_to_be_proven); - let fri_prover_config = general_config.prover_config.context("prover config")?; - let zone_url = &fri_prover_config.zone_read_url; - let zone = get_zone(zone_url).await.context("get_zone()")?; + let prover_config = general_config.prover_config.context("prover config")?; + let zone = RegionFetcher::new( + prover_config.cloud_type, + 
prover_config.zone_read_url.clone(), + ) + .get_zone() + .await + .context("get_zone()")?; let protocol_version = PROVER_PROTOCOL_SEMANTIC_VERSION; @@ -108,8 +113,8 @@ async fn main() -> anyhow::Result<()> { zone.clone(), config, protocol_version, - fri_prover_config.max_attempts, - Some(fri_prover_config.setup_data_path.clone()), + prover_config.max_attempts, + Some(prover_config.setup_data_path.clone()), ); let (stop_sender, stop_receiver) = watch::channel(false); From 55aabffbb39701eed0dfb338d8fd06751e736190 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 22 Jul 2024 12:43:21 +0400 Subject: [PATCH 05/52] chore: Publish fix-ups (#2445) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ A few more fixes that were required to publish packages. Core workspace crates are already published under `crates.io-v0.1.0` tag. ## Why ❔ Publishing on crates.io ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- core/lib/vlog/Cargo.toml | 1 - core/tests/test_account/Cargo.toml | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/core/lib/vlog/Cargo.toml b/core/lib/vlog/Cargo.toml index 17f0e88b8c84..eb1ed735519c 100644 --- a/core/lib/vlog/Cargo.toml +++ b/core/lib/vlog/Cargo.toml @@ -9,7 +9,6 @@ repository.workspace = true license.workspace = true keywords.workspace = true categories.workspace = true -publish = false [dependencies] anyhow.workspace = true diff --git a/core/tests/test_account/Cargo.toml b/core/tests/test_account/Cargo.toml index 6df10edd7dca..0dda4f8ac777 100644 --- a/core/tests/test_account/Cargo.toml +++ b/core/tests/test_account/Cargo.toml @@ -1,6 +1,7 @@ [package] name = "zksync_test_account" -version = "0.1.0" +description = "ZKsync test account for writing unit tests" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true @@ -8,7 +9,6 @@ repository.workspace = true license.workspace = true keywords.workspace = true categories.workspace = true -publish = false [dependencies] zksync_types.workspace = true From ce62ddea65e77cd43b9b55f97df6423d2a63e0ca Mon Sep 17 00:00:00 2001 From: zksync-era-bot <147085853+zksync-era-bot@users.noreply.github.com> Date: Mon, 22 Jul 2024 12:57:05 +0300 Subject: [PATCH 06/52] chore(main): release core 24.10.0 (#2423) :robot: I have created a release *beep* *boop* --- ## [24.10.0](https://github.com/matter-labs/zksync-era/compare/core-v24.9.0...core-v24.10.0) (2024-07-22) ### Features * Add blob size metrics ([#2411](https://github.com/matter-labs/zksync-era/issues/2411)) ([41c535a](https://github.com/matter-labs/zksync-era/commit/41c535af2bcc72000116277d5dd9e04b5c0b2372)) * **en:** Switch EN to use node framework ([#2427](https://github.com/matter-labs/zksync-era/issues/2427)) ([0cee530](https://github.com/matter-labs/zksync-era/commit/0cee530b2f2e8304b7e20a093a32abe116463b57)) * **eth-sender:** add early return in sending new transactions to not spam 
logs with errors ([#2425](https://github.com/matter-labs/zksync-era/issues/2425)) ([192f2a3](https://github.com/matter-labs/zksync-era/commit/192f2a374d83eaecb52f198fdcfa615262378530)) * **eth-watch:** Integrate decentralized upgrades ([#2401](https://github.com/matter-labs/zksync-era/issues/2401)) ([5a48e10](https://github.com/matter-labs/zksync-era/commit/5a48e1026260024c6ae2b4d1100ee9b798a83e8d)) * L1 batch signing (BFT-474) ([#2414](https://github.com/matter-labs/zksync-era/issues/2414)) ([ab699db](https://github.com/matter-labs/zksync-era/commit/ab699dbe8cffa8bd291d6054579061b47fd4aa0e)) * **prover:** Make it possible to run prover out of GCP ([#2448](https://github.com/matter-labs/zksync-era/issues/2448)) ([c9da549](https://github.com/matter-labs/zksync-era/commit/c9da5497e2aa9d85f204ab7b74fefcfe941793ff)) * **zk_toolbox:** Small adjustment for zk toolbox ([#2424](https://github.com/matter-labs/zksync-era/issues/2424)) ([ce43c42](https://github.com/matter-labs/zksync-era/commit/ce43c422fddccfe88c07ee22a2b8726dd0bd5f61)) ### Bug Fixes * **eth-sender:** add bump of min 10% when resending txs to avoid "replacement transaction underpriced" ([#2422](https://github.com/matter-labs/zksync-era/issues/2422)) ([a7bcf5d](https://github.com/matter-labs/zksync-era/commit/a7bcf5d7f75eb45384312d7c97f25a50a91e7a31)) * Set attesters in Connection::adjust_genesis (BFT-489) ([#2429](https://github.com/matter-labs/zksync-era/issues/2429)) ([ca4cb3c](https://github.com/matter-labs/zksync-era/commit/ca4cb3cba04757dc1760397c667a838931cd2d11)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--------- Co-authored-by: zksync-era-bot --- .github/release-please/manifest.json | 2 +- Cargo.lock | 2 +- core/CHANGELOG.md | 19 +++++++++++++++++++ core/bin/external_node/Cargo.toml | 2 +- 4 files changed, 22 insertions(+), 3 deletions(-) diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index b50534880a10..058b522b417e 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,4 +1,4 @@ { - "core": "24.9.0", + "core": "24.10.0", "prover": "16.0.0" } diff --git a/Cargo.lock b/Cargo.lock index 716edb33c877..f3605beb7918 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8624,7 +8624,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "24.9.0" +version = "24.10.0" dependencies = [ "anyhow", "assert_matches", diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index ee4aad02eaf6..45182e704e5a 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [24.10.0](https://github.com/matter-labs/zksync-era/compare/core-v24.9.0...core-v24.10.0) (2024-07-22) + + +### Features + +* Add blob size metrics ([#2411](https://github.com/matter-labs/zksync-era/issues/2411)) ([41c535a](https://github.com/matter-labs/zksync-era/commit/41c535af2bcc72000116277d5dd9e04b5c0b2372)) +* **en:** Switch EN to use node framework ([#2427](https://github.com/matter-labs/zksync-era/issues/2427)) ([0cee530](https://github.com/matter-labs/zksync-era/commit/0cee530b2f2e8304b7e20a093a32abe116463b57)) +* **eth-sender:** add early return in sending new transactions to not spam logs with errors ([#2425](https://github.com/matter-labs/zksync-era/issues/2425)) ([192f2a3](https://github.com/matter-labs/zksync-era/commit/192f2a374d83eaecb52f198fdcfa615262378530)) +* **eth-watch:** Integrate decentralized upgrades ([#2401](https://github.com/matter-labs/zksync-era/issues/2401)) ([5a48e10](https://github.com/matter-labs/zksync-era/commit/5a48e1026260024c6ae2b4d1100ee9b798a83e8d)) +* L1 
batch signing (BFT-474) ([#2414](https://github.com/matter-labs/zksync-era/issues/2414)) ([ab699db](https://github.com/matter-labs/zksync-era/commit/ab699dbe8cffa8bd291d6054579061b47fd4aa0e)) +* **prover:** Make it possible to run prover out of GCP ([#2448](https://github.com/matter-labs/zksync-era/issues/2448)) ([c9da549](https://github.com/matter-labs/zksync-era/commit/c9da5497e2aa9d85f204ab7b74fefcfe941793ff)) +* **zk_toolbox:** Small adjustment for zk toolbox ([#2424](https://github.com/matter-labs/zksync-era/issues/2424)) ([ce43c42](https://github.com/matter-labs/zksync-era/commit/ce43c422fddccfe88c07ee22a2b8726dd0bd5f61)) + + +### Bug Fixes + +* **eth-sender:** add bump of min 10% when resending txs to avoid "replacement transaction underpriced" ([#2422](https://github.com/matter-labs/zksync-era/issues/2422)) ([a7bcf5d](https://github.com/matter-labs/zksync-era/commit/a7bcf5d7f75eb45384312d7c97f25a50a91e7a31)) +* Set attesters in Connection::adjust_genesis (BFT-489) ([#2429](https://github.com/matter-labs/zksync-era/issues/2429)) ([ca4cb3c](https://github.com/matter-labs/zksync-era/commit/ca4cb3cba04757dc1760397c667a838931cd2d11)) + ## [24.9.0](https://github.com/matter-labs/zksync-era/compare/core-v24.8.0...core-v24.9.0) (2024-07-10) diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index c083561897d2..84c0ddd16e09 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "24.9.0" # x-release-please-version +version = "24.10.0" # x-release-please-version edition.workspace = true authors.workspace = true homepage.workspace = true From 2025f3c1f712227469ad3d17d3ba2874e142f576 Mon Sep 17 00:00:00 2001 From: Roman Brodetski Date: Mon, 22 Jul 2024 11:50:50 +0100 Subject: [PATCH 07/52] fix(workflow): Fix build-docker-from-tag.yml (#2454) --- .github/workflows/build-docker-from-tag.yml | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml index 50c28d9677d8..7e5257796643 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -61,7 +61,7 @@ jobs: build-push-tee-prover-images: name: Build and push images - needs: [setup, changed_files] + needs: [setup] uses: ./.github/workflows/build-tee-prover-template.yml if: contains(github.ref_name, 'core') secrets: From b61a144f553fa533502afd4f89d984f202ff4058 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 22 Jul 2024 16:03:35 +0400 Subject: [PATCH 08/52] refactor: Change prover workspace hierarchy (#2453) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Prover workspace has got its own "mature" hierarchy, with multiple crates, some data folders, some important files lying in the root, etc. This makes prover workspace less understandable (especially figuring out the `vk_setup_data_generator_fri/data` folder). This is the first PR of N that aims to improve prover workspace hierarchy. It moves all the crates to two subfolders: `crates/bin` and `crates/lib`. Right now we have most of logic in binaries, but later we'll move some of it to `lib` too. And hopefully, in a foreseeable future we will also have `crates/test`. Later on we will also have a top-level directory(ies) for _data_ (e.g. keys). ## Why ❔ Make the workspace easier to orient in. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- .dockerignore | 2 +- .gitignore | 2 +- Cargo.toml | 4 ++-- docker/local-node/Dockerfile | 2 +- docker/proof-fri-compressor/Dockerfile | 2 +- docker/proof-fri-gpu-compressor/Dockerfile | 2 +- docker/prover-fri-gateway/Dockerfile | 2 +- docker/prover-fri/Dockerfile | 2 +- docker/prover-gpu-fri-gar/Dockerfile | 2 +- docker/prover-gpu-fri/Dockerfile | 2 +- docker/witness-generator/Dockerfile | 2 +- docker/witness-vector-generator/Dockerfile | 2 +- docs/guides/advanced/zk_intuition.md | 3 ++- infrastructure/zk/src/database.ts | 2 +- infrastructure/zk/src/format_sql.ts | 2 +- prover/Cargo.toml | 22 +++++------------- .../bin}/proof_fri_compressor/Cargo.toml | 0 .../bin}/proof_fri_compressor/README.md | 0 .../proof_fri_compressor/src/compressor.rs | 0 .../src/initial_setup_keys.rs | 0 .../bin}/proof_fri_compressor/src/main.rs | 0 .../bin}/proof_fri_compressor/src/metrics.rs | 0 prover/{ => crates/bin}/prover_cli/Cargo.toml | 0 prover/{ => crates/bin}/prover_cli/README.md | 2 +- prover/{ => crates/bin}/prover_cli/src/cli.rs | 0 .../bin}/prover_cli/src/commands/config.rs | 0 .../prover_cli/src/commands/debug_proof.rs | 0 .../bin}/prover_cli/src/commands/delete.rs | 0 .../prover_cli/src/commands/get_file_info.rs | 0 .../bin}/prover_cli/src/commands/mod.rs | 0 .../bin}/prover_cli/src/commands/requeue.rs | 0 .../bin}/prover_cli/src/commands/restart.rs | 0 .../bin}/prover_cli/src/commands/stats.rs | 0 .../prover_cli/src/commands/status/batch.rs | 0 .../bin}/prover_cli/src/commands/status/l1.rs | 0 .../prover_cli/src/commands/status/mod.rs | 0 .../prover_cli/src/commands/status/utils.rs | 0 .../bin}/prover_cli/src/config/mod.rs | 0 .../bin}/prover_cli/src/examples/pliconfig | 0 .../{ => crates/bin}/prover_cli/src/helper.rs | 0 prover/{ => crates/bin}/prover_cli/src/lib.rs | 0 .../{ => crates/bin}/prover_cli/src/main.rs | 0 prover/{ => crates/bin}/prover_fri/Cargo.toml | 0 prover/{ => crates/bin}/prover_fri/README.md | 0 .../src/gpu_prover_availability_checker.rs | 0 
.../src/gpu_prover_job_processor.rs | 0 prover/{ => crates/bin}/prover_fri/src/lib.rs | 0 .../{ => crates/bin}/prover_fri/src/main.rs | 0 .../bin}/prover_fri/src/metrics.rs | 0 .../prover_fri/src/prover_job_processor.rs | 0 .../bin}/prover_fri/src/socket_listener.rs | 0 .../{ => crates/bin}/prover_fri/src/utils.rs | 0 .../bin}/prover_fri/tests/basic_test.rs | 0 .../tests/data/proofs_fri/proof_1293714.bin | Bin .../tests/data/proofs_fri/proof_5176866.bin | Bin .../114499_479_6_BasicCircuits_0.bin | Bin .../128623_1086_1_BasicCircuits_0.bin | Bin .../bin}/prover_fri_gateway/Cargo.toml | 0 .../bin}/prover_fri_gateway/README.md | 0 .../bin}/prover_fri_gateway/src/client.rs | 0 .../bin}/prover_fri_gateway/src/main.rs | 0 .../bin}/prover_fri_gateway/src/metrics.rs | 0 .../src/proof_gen_data_fetcher.rs | 0 .../prover_fri_gateway/src/proof_submitter.rs | 0 .../bin}/prover_fri_gateway/src/traits.rs | 0 .../bin}/prover_version/Cargo.toml | 0 .../bin}/prover_version/src/main.rs | 0 .../Cargo.toml | 0 .../README.md | 0 .../data/commitments.json | 0 .../data/finalization_hints_basic_1.bin | Bin .../data/finalization_hints_basic_10.bin | Bin .../data/finalization_hints_basic_11.bin | Bin .../data/finalization_hints_basic_12.bin | Bin .../data/finalization_hints_basic_13.bin | Bin .../data/finalization_hints_basic_14.bin | Bin .../data/finalization_hints_basic_15.bin | Bin .../data/finalization_hints_basic_2.bin | Bin .../data/finalization_hints_basic_255.bin | Bin .../data/finalization_hints_basic_3.bin | Bin .../data/finalization_hints_basic_4.bin | Bin .../data/finalization_hints_basic_5.bin | Bin .../data/finalization_hints_basic_6.bin | Bin .../data/finalization_hints_basic_7.bin | Bin .../data/finalization_hints_basic_8.bin | Bin .../data/finalization_hints_basic_9.bin | Bin .../data/finalization_hints_leaf_10.bin | Bin .../data/finalization_hints_leaf_11.bin | Bin .../data/finalization_hints_leaf_12.bin | Bin .../data/finalization_hints_leaf_13.bin | Bin 
.../data/finalization_hints_leaf_14.bin | Bin .../data/finalization_hints_leaf_15.bin | Bin .../data/finalization_hints_leaf_16.bin | Bin .../data/finalization_hints_leaf_17.bin | Bin .../data/finalization_hints_leaf_18.bin | Bin .../data/finalization_hints_leaf_3.bin | Bin .../data/finalization_hints_leaf_4.bin | Bin .../data/finalization_hints_leaf_5.bin | Bin .../data/finalization_hints_leaf_6.bin | Bin .../data/finalization_hints_leaf_7.bin | Bin .../data/finalization_hints_leaf_8.bin | Bin .../data/finalization_hints_leaf_9.bin | Bin .../data/finalization_hints_node.bin | Bin .../data/finalization_hints_recursion_tip.bin | Bin .../data/finalization_hints_scheduler.bin | Bin .../snark_verification_scheduler_key.json | 0 .../data/verification_basic_10_key.json | 0 .../data/verification_basic_11_key.json | 0 .../data/verification_basic_12_key.json | 0 .../data/verification_basic_13_key.json | 0 .../data/verification_basic_14_key.json | 0 .../data/verification_basic_15_key.json | 0 .../data/verification_basic_1_key.json | 0 .../data/verification_basic_255_key.json | 0 .../data/verification_basic_2_key.json | 0 .../data/verification_basic_3_key.json | 0 .../data/verification_basic_4_key.json | 0 .../data/verification_basic_5_key.json | 0 .../data/verification_basic_6_key.json | 0 .../data/verification_basic_7_key.json | 0 .../data/verification_basic_8_key.json | 0 .../data/verification_basic_9_key.json | 0 .../data/verification_leaf_10_key.json | 0 .../data/verification_leaf_11_key.json | 0 .../data/verification_leaf_12_key.json | 0 .../data/verification_leaf_13_key.json | 0 .../data/verification_leaf_14_key.json | 0 .../data/verification_leaf_15_key.json | 0 .../data/verification_leaf_16_key.json | 0 .../data/verification_leaf_17_key.json | 0 .../data/verification_leaf_18_key.json | 0 .../data/verification_leaf_3_key.json | 0 .../data/verification_leaf_4_key.json | 0 .../data/verification_leaf_5_key.json | 0 .../data/verification_leaf_6_key.json | 0 
.../data/verification_leaf_7_key.json | 0 .../data/verification_leaf_8_key.json | 0 .../data/verification_leaf_9_key.json | 0 .../data/verification_node_key.json | 0 .../data/verification_recursion_tip_key.json | 0 .../data/verification_scheduler_key.json | 0 .../historical_data/0.24.0/commitments.json | 0 .../snark_verification_scheduler_key.json | 0 .../historical_data/0.24.1/commitments.json | 0 .../snark_verification_scheduler_key.json | 0 .../historical_data/18/commitments.json | 0 .../18/snark_verification_scheduler_key.json | 0 .../historical_data/19/commitments.json | 0 .../19/snark_verification_scheduler_key.json | 0 .../historical_data/20/commitments.json | 0 .../20/snark_verification_scheduler_key.json | 0 .../historical_data/21/commitments.json | 0 .../21/snark_verification_scheduler_key.json | 0 .../historical_data/22/commitments.json | 0 .../22/snark_verification_scheduler_key.json | 0 .../historical_data/23/commitments.json | 0 .../23/snark_verification_scheduler_key.json | 0 .../historical_data/README.md | 0 .../src/commitment_generator.rs | 0 .../src/commitment_utils.rs | 0 .../src/keystore.rs | 4 ++-- .../src/lib.rs | 0 .../src/main.rs | 0 .../src/setup_data_generator.rs | 0 .../src/tests.rs | 0 .../src/utils.rs | 0 .../src/vk_commitment_helper.rs | 0 .../bin}/witness_generator/Cargo.toml | 0 .../bin}/witness_generator/README.md | 0 .../witness_generator/src/basic_circuits.rs | 0 .../witness_generator/src/leaf_aggregation.rs | 0 .../bin}/witness_generator/src/lib.rs | 0 .../bin}/witness_generator/src/main.rs | 0 .../bin}/witness_generator/src/metrics.rs | 0 .../witness_generator/src/node_aggregation.rs | 0 .../precalculated_merkle_paths_provider.rs | 0 .../witness_generator/src/recursion_tip.rs | 0 .../bin}/witness_generator/src/scheduler.rs | 0 .../witness_generator/src/storage_oracle.rs | 0 .../bin}/witness_generator/src/tests.rs | 0 .../witness_generator/src/trusted_setup.json | 0 .../bin}/witness_generator/src/utils.rs | 0 
.../witness_generator/tests/basic_test.rs | 0 .../closed_form_inputs_125010_4.bin | Bin .../aggregations_125010_6_0.bin | Bin .../data/leaf/proofs_fri/proof_4639043.bin | Bin .../data/leaf/proofs_fri/proof_4639044.bin | Bin .../data/leaf/proofs_fri/proof_4639045.bin | Bin .../aggregations_127856_8_0.bin | Bin .../aggregations_127856_8_1.bin | Bin .../data/node/proofs_fri/proof_5211320.bin | Bin .../scheduler/proofs_fri/proof_5627082.bin | Bin .../scheduler/proofs_fri/proof_5627083.bin | Bin .../scheduler/proofs_fri/proof_5627084.bin | Bin .../scheduler/proofs_fri/proof_5627085.bin | Bin .../scheduler/proofs_fri/proof_5627086.bin | Bin .../scheduler/proofs_fri/proof_5627090.bin | Bin .../scheduler/proofs_fri/proof_5627091.bin | Bin .../scheduler/proofs_fri/proof_5627092.bin | Bin .../scheduler/proofs_fri/proof_5627093.bin | Bin .../scheduler/proofs_fri/proof_5627094.bin | Bin .../scheduler/proofs_fri/proof_5629097.bin | Bin .../scheduler/proofs_fri/proof_5631320.bin | Bin .../scheduler/proofs_fri/proof_5639969.bin | Bin .../128599_0_1_Scheduler_0.bin | Bin .../scheduler_witness_128599.bin | Bin .../bin}/witness_vector_generator/Cargo.toml | 0 .../bin}/witness_vector_generator/README.md | 0 .../witness_vector_generator/src/generator.rs | 0 .../bin}/witness_vector_generator/src/lib.rs | 0 .../bin}/witness_vector_generator/src/main.rs | 0 .../witness_vector_generator/src/metrics.rs | 0 .../tests/basic_test.rs | 0 .../tests/data/base_layer_main_vm.bin | Bin ...f6e1df560ab1e8935564355236e90b6147d2f.json | 0 ...579b23540815afa1c6a8d4c36bba951861fe7.json | 0 ...dab9b63eee7f21c450a723e4ba011edc8e2bb.json | 0 ...bb3402044d201e85e114ff4582394c32bd2bf.json | 0 ...f113a19feb73c4cf9876855523499998b99c0.json | 0 ...8dbc21cccb9a95e3db1c93da239845a5e9036.json | 0 ...2254a457665179d9cf0a3c0b18c3fe09e4838.json | 0 ...98f5e2450cc4faee2f80b37fbd5626324dbeb.json | 0 ...aae31358088e142dff51c9f0bde8f386900d3.json | 0 ...5d2832571464e74b5fed92cf54617573c84ec.json | 0 
...a68a48db6a64afcd41bbe0e17d98fa38fdb19.json | 0 ...dd8547a1ad20492ec37c3c0be5639e5d49952.json | 0 ...a8b699386b3c5b4e02d5ce046f0f2e0ddc388.json | 0 ...7def3a97275b66ad33d214054dc9048ddf584.json | 0 ...a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json | 0 ...52554ccfb5b83f00efdc12bed0f60ef439785.json | 0 ...19d03f894f40d2ec528382b5643c3d51ec8e7.json | 0 ...7249ec09c0daf4368021788207370213a6d94.json | 0 ...f1d4d9a4b83a8b42846d8373ea13b96d612cf.json | 0 ...9fd5b3d210a117bb0027d58c6cb4debd63f33.json | 0 ...e2d3a6ebb3657862b91e3ece34119f098fc2d.json | 0 ...6769dbb04d3a61cf232892236c974660ffe64.json | 0 ...0ad195b0dd2a8ce56b1a9eb531103130b5e3e.json | 0 ...8b87ead36f593488437c6f67da629ca81e4fa.json | 0 ...97ed410fa47b268a66f1fc56d469c06ae50af.json | 0 ...2601d35fd2881ac1fd070f0f1a8add4bc388d.json | 0 ...5da82065836fe17687ffad04126a6a8b2b27c.json | 0 ...9a8f447824a5ab466bb6eea1710e8aeaa2c56.json | 0 ...d94f28b7b2b60d551d552a9b0bab1f1791e39.json | 0 ...592895215e22fd4cf0dfe69b83277f8d05db3.json | 0 ...7a1a04821495487a80595cc9b523dac6ac8e9.json | 0 ...7effac442434c6e734d977e6682a7484abe7f.json | 0 ...52aeb5f06c26f68d131dd242f6ed68816c513.json | 0 ...d0fc9ac9a7f3dce2ba48c2d0e6f38b6ba455a.json | 0 ...9d0c658093dede5eb61489205aa751ad5b8ec.json | 0 ...7ac83cd32a628d3e01e5cd1949c519683a352.json | 0 ...d419667f11d80036cda021ecbf23b0b5f7f42.json | 0 ...715e903f3b399886c2c73e838bd924fed6776.json | 0 ...4f32042dfead8a37401558f5fd3b03480f2dd.json | 0 ...7227120a8279db1875d26ccae5ee0785f46a9.json | 0 ...c39ae8a6e053a0e03afd3fb5e02ee17157067.json | 0 ...78815e29440592b2bb00adacf02730b526458.json | 0 ...cdce6412e2725cf5162ce7a733f6dceaecb11.json | 0 ...c0b6c018e6a4d279acd24a4ea7d81b5cc5123.json | 0 ...e085ea80cf93c2fd66fd3949aab428bbdc560.json | 0 ...023678f31a1b7f5ee33b643dd551c40e88329.json | 0 ...89daacb88fe5aaf368c5f81a885821522b99c.json | 0 ...866e8f67a380302762c272bfb27307682d62e.json | 0 ...9bfb838c787fc58d7536f9e9976e5e515431a.json | 0 ...0767a2cd4488e670674cd9149f7a332c0198d.json | 0 
...b210d65149cdd4a3411a79b717aadbffb43af.json | 0 ...c9a64904026506914abae2946e5d353d6a604.json | 0 ...ef3ad13840d2c497760e9bd0513f68dc4271c.json | 0 ...43c868c63c853edb5c4f41e48a3cc6378eca9.json | 0 ...01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json | 0 ...20222e177262292241bd8cb89dbb9c1e74c2d.json | 0 ...7b56187686173327498ac75424593547c19c5.json | 0 ...f8c12deeca6b8843fe3869cc2b02b30da5de6.json | 0 ...49b6370c211a7fc24ad03a5f0e327f9d18040.json | 0 ...0103263af3ff5cb6c9dc5df59d3cd2a5e56b4.json | 0 ...a9dc31c7d51476f18cffa80cad653298ad252.json | 0 ...5263556f258565f79cbb40f5ecc1a4f6402f5.json | 0 ...912d57f8eb2a38bdb7884fc812a2897a3a660.json | 0 ...69718349ac4fc08b455c7f4265d7443f2ec13.json | 0 ...6997fcfbc7ad688f2eee3dfab1029344d2382.json | 0 ...d34a5baece02812f8c950fc84d37eeebd33a4.json | 0 ...4775c6f7414c7bed75d33b61de00fdbabc349.json | 0 ...ac429aac3c030f7e226a1264243d8cdae038d.json | 0 ...cb21a635037d89ce24dd3ad58ffaadb59594a.json | 0 ...3b6da86d1e693be03936730c340121167341f.json | 0 ...3e67f08f2ead5f55bfb6594e50346bf9cf2ef.json | 0 ...f029e262be45614404159908af1624349700b.json | 0 ...191a43dc8eafc33ee067bd41e20f25f7625f0.json | 0 ...8b02c44b099e27e3c45c5c810cd5fcd8884ed.json | 0 ...c6fadb8e12a9218399d189b4d95e2ca4fcc48.json | 0 ...2060fbea775dc185f639139fbfd23e4d5f3c6.json | 0 ...70a4e629b2a1cde641e74e4e55bb100df809f.json | 0 ...e118cabc67b6e507efefb7b69e102f1b43c58.json | 0 ...d4f9a3b98458746972c9860fb9473947d59ff.json | 0 ...9bae42849574731d33539bfdcca21c9b64f4e.json | 0 ...93a4eb2ee0284aa89bca1ba958f470a2d6254.json | 0 ...fba74ec2cfc3c89c7e4e2ea475c3ce4092849.json | 0 ...567878f347bdaf36294e9b24ee9c0aa1e861b.json | 0 ...b99cf505662036f2dd7a9f1807c4c1bad7c7b.json | 0 ...c3465e2211ef3013386feb12d4cc04e0eade9.json | 0 ...15aaade450980719933089824eb8c494d64a4.json | 0 ...583a7526ae38ceb4bf80543cfd3fb60492fb9.json | 0 ...dae905acac53b46eeaeb059d23e48a71df3b4.json | 0 ...304e8a35fd65bf37e976b7106f57c57e70b9b.json | 0 prover/{ => crates/lib}/prover_dal/Cargo.toml | 0 
.../prover_dal/doc/FriProofCompressorDal.md | 0 .../lib}/prover_dal/doc/FriProverDal.md | 0 .../prover_dal/doc/FriWitnessGeneratorDal.md | 0 ...31134938_initial-prover-migration.down.sql | 0 ...0131134938_initial-prover-migration.up.sql | 0 ...226120310_add_support_for_eip4844.down.sql | 0 ...40226120310_add_support_for_eip4844.up.sql | 0 ...at_column_to_prover_queue_archive.down.sql | 0 ...d_at_column_to_prover_queue_archive.up.sql | 0 ...9_add-protocol-versions-to-tables.down.sql | 0 ...719_add-protocol-versions-to-tables.up.sql | 0 ...606_add_changes_for_recursion_tip.down.sql | 0 ...02606_add_changes_for_recursion_tip.up.sql | 0 ...3522_add-patch-columns-for-semver.down.sql | 0 ...123522_add-patch-columns-for-semver.up.sql | 0 ...mber_of_final_node_jobs_mandatory.down.sql | 0 ...number_of_final_node_jobs_mandatory.up.sql | 0 ...0703113903_add-vm_run_data-column.down.sql | 0 ...240703113903_add-vm_run_data-column.up.sql | 0 .../src/fri_gpu_prover_queue_dal.rs | 0 .../src/fri_proof_compressor_dal.rs | 0 .../src/fri_protocol_versions_dal.rs | 0 .../lib}/prover_dal/src/fri_prover_dal.rs | 0 .../src/fri_witness_generator_dal.rs | 0 prover/{ => crates/lib}/prover_dal/src/lib.rs | 0 .../lib}/prover_fri_types/Cargo.toml | 0 .../lib}/prover_fri_types/README.md | 0 .../lib}/prover_fri_types/src/keys.rs | 0 .../lib}/prover_fri_types/src/lib.rs | 0 .../lib}/prover_fri_types/src/queue.rs | 0 .../lib}/prover_fri_utils/Cargo.toml | 0 .../lib}/prover_fri_utils/src/lib.rs | 0 .../lib}/prover_fri_utils/src/metrics.rs | 0 .../prover_fri_utils/src/region_fetcher.rs | 0 .../lib}/prover_fri_utils/src/socket_utils.rs | 0 zk_toolbox/crates/zk_inception/src/consts.rs | 2 +- zk_toolbox/crates/zk_supervisor/src/dals.rs | 2 +- 341 files changed, 28 insertions(+), 37 deletions(-) rename prover/{ => crates/bin}/proof_fri_compressor/Cargo.toml (100%) rename prover/{ => crates/bin}/proof_fri_compressor/README.md (100%) rename prover/{ => crates/bin}/proof_fri_compressor/src/compressor.rs 
(100%) rename prover/{ => crates/bin}/proof_fri_compressor/src/initial_setup_keys.rs (100%) rename prover/{ => crates/bin}/proof_fri_compressor/src/main.rs (100%) rename prover/{ => crates/bin}/proof_fri_compressor/src/metrics.rs (100%) rename prover/{ => crates/bin}/prover_cli/Cargo.toml (100%) rename prover/{ => crates/bin}/prover_cli/README.md (99%) rename prover/{ => crates/bin}/prover_cli/src/cli.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/config.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/debug_proof.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/delete.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/get_file_info.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/mod.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/requeue.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/restart.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/stats.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/status/batch.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/status/l1.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/status/mod.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/commands/status/utils.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/config/mod.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/examples/pliconfig (100%) rename prover/{ => crates/bin}/prover_cli/src/helper.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/lib.rs (100%) rename prover/{ => crates/bin}/prover_cli/src/main.rs (100%) rename prover/{ => crates/bin}/prover_fri/Cargo.toml (100%) rename prover/{ => crates/bin}/prover_fri/README.md (100%) rename prover/{ => crates/bin}/prover_fri/src/gpu_prover_availability_checker.rs (100%) rename prover/{ => crates/bin}/prover_fri/src/gpu_prover_job_processor.rs (100%) rename prover/{ => crates/bin}/prover_fri/src/lib.rs (100%) rename prover/{ => 
crates/bin}/prover_fri/src/main.rs (100%) rename prover/{ => crates/bin}/prover_fri/src/metrics.rs (100%) rename prover/{ => crates/bin}/prover_fri/src/prover_job_processor.rs (100%) rename prover/{ => crates/bin}/prover_fri/src/socket_listener.rs (100%) rename prover/{ => crates/bin}/prover_fri/src/utils.rs (100%) rename prover/{ => crates/bin}/prover_fri/tests/basic_test.rs (100%) rename prover/{ => crates/bin}/prover_fri/tests/data/proofs_fri/proof_1293714.bin (100%) rename prover/{ => crates/bin}/prover_fri/tests/data/proofs_fri/proof_5176866.bin (100%) rename prover/{ => crates/bin}/prover_fri/tests/data/prover_jobs_fri/114499_479_6_BasicCircuits_0.bin (100%) rename prover/{ => crates/bin}/prover_fri/tests/data/prover_jobs_fri/128623_1086_1_BasicCircuits_0.bin (100%) rename prover/{ => crates/bin}/prover_fri_gateway/Cargo.toml (100%) rename prover/{ => crates/bin}/prover_fri_gateway/README.md (100%) rename prover/{ => crates/bin}/prover_fri_gateway/src/client.rs (100%) rename prover/{ => crates/bin}/prover_fri_gateway/src/main.rs (100%) rename prover/{ => crates/bin}/prover_fri_gateway/src/metrics.rs (100%) rename prover/{ => crates/bin}/prover_fri_gateway/src/proof_gen_data_fetcher.rs (100%) rename prover/{ => crates/bin}/prover_fri_gateway/src/proof_submitter.rs (100%) rename prover/{ => crates/bin}/prover_fri_gateway/src/traits.rs (100%) rename prover/{ => crates/bin}/prover_version/Cargo.toml (100%) rename prover/{ => crates/bin}/prover_version/src/main.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/Cargo.toml (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/README.md (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_1.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_10.bin (100%) rename prover/{ => 
crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_11.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_12.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_13.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_14.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_15.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_2.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_255.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_3.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_4.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_5.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_6.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_7.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_8.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_basic_9.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_10.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_11.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_12.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_13.bin (100%) rename prover/{ => 
crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_14.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_15.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_16.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_17.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_18.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_3.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_4.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_5.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_6.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_7.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_8.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_9.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_node.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_recursion_tip.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/finalization_hints_scheduler.bin (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_10_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_11_key.json (100%) rename prover/{ => 
crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_12_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_13_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_14_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_15_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_1_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_255_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_2_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_3_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_4_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_5_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_6_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_7_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_8_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_basic_9_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_10_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_11_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_12_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_13_key.json (100%) rename prover/{ => 
crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_14_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_15_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_16_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_17_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_18_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_3_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_4_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_5_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_6_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_7_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_8_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_leaf_9_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_node_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_recursion_tip_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/data/verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/0.24.0/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/0.24.0/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/0.24.1/commitments.json (100%) rename prover/{ => 
crates/bin}/vk_setup_data_generator_server_fri/historical_data/0.24.1/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/18/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/18/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/19/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/19/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/20/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/20/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/21/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/21/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/22/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/22/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/23/commitments.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/23/snark_verification_scheduler_key.json (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/historical_data/README.md (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/commitment_generator.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/commitment_utils.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/keystore.rs (99%) rename prover/{ => 
crates/bin}/vk_setup_data_generator_server_fri/src/lib.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/main.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/setup_data_generator.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/tests.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/utils.rs (100%) rename prover/{ => crates/bin}/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs (100%) rename prover/{ => crates/bin}/witness_generator/Cargo.toml (100%) rename prover/{ => crates/bin}/witness_generator/README.md (100%) rename prover/{ => crates/bin}/witness_generator/src/basic_circuits.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/leaf_aggregation.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/lib.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/main.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/metrics.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/node_aggregation.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/precalculated_merkle_paths_provider.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/recursion_tip.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/scheduler.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/storage_oracle.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/tests.rs (100%) rename prover/{ => crates/bin}/witness_generator/src/trusted_setup.json (100%) rename prover/{ => crates/bin}/witness_generator/src/utils.rs (100%) rename prover/{ => crates/bin}/witness_generator/tests/basic_test.rs (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/leaf/leaf_aggregation_witness_jobs_fri/closed_form_inputs_125010_4.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/leaf/node_aggregation_witness_jobs_fri/aggregations_125010_6_0.bin (100%) rename 
prover/{ => crates/bin}/witness_generator/tests/data/leaf/proofs_fri/proof_4639043.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/leaf/proofs_fri/proof_4639044.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/leaf/proofs_fri/proof_4639045.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_0.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_1.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/node/proofs_fri/proof_5211320.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627082.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627083.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627084.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627085.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627086.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627090.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627091.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627092.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627093.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5627094.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5629097.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5631320.bin (100%) rename prover/{ => 
crates/bin}/witness_generator/tests/data/scheduler/proofs_fri/proof_5639969.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/prover_jobs_fri/128599_0_1_Scheduler_0.bin (100%) rename prover/{ => crates/bin}/witness_generator/tests/data/scheduler/scheduler_witness_jobs_fri/scheduler_witness_128599.bin (100%) rename prover/{ => crates/bin}/witness_vector_generator/Cargo.toml (100%) rename prover/{ => crates/bin}/witness_vector_generator/README.md (100%) rename prover/{ => crates/bin}/witness_vector_generator/src/generator.rs (100%) rename prover/{ => crates/bin}/witness_vector_generator/src/lib.rs (100%) rename prover/{ => crates/bin}/witness_vector_generator/src/main.rs (100%) rename prover/{ => crates/bin}/witness_vector_generator/src/metrics.rs (100%) rename prover/{ => crates/bin}/witness_vector_generator/tests/basic_test.rs (100%) rename prover/{ => crates/bin}/witness_vector_generator/tests/data/base_layer_main_vm.bin (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-069f04bdfafbe2e3628ac3ded93dab9b63eee7f21c450a723e4ba011edc8e2bb.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-081e2b928f0816c41d6645c1dedbb3402044d201e85e114ff4582394c32bd2bf.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-0a1ec4690d6b4a67d6ad16badcbf113a19feb73c4cf9876855523499998b99c0.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-0b70c98c2edd8370ad09ac553c18dbc21cccb9a95e3db1c93da239845a5e9036.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-16548daf69e9ff0528904be2e142254a457665179d9cf0a3c0b18c3fe09e4838.json (100%) rename prover/{ => 
crates/lib}/prover_dal/.sqlx/query-1849cfa3167eed2809e7724a63198f5e2450cc4faee2f80b37fbd5626324dbeb.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-2095e5646c382ccbc6e3bafdeddaae31358088e142dff51c9f0bde8f386900d3.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-28397b5a0b7af832d2a4d3d7011a68a48db6a64afcd41bbe0e17d98fa38fdb19.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-28f03acf565c4b50fe86f606c18a8b699386b3c5b4e02d5ce046f0f2e0ddc388.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-29ff260b02f7b955f9fe0b657b87def3a97275b66ad33d214054dc9048ddf584.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-2dc6b7bf08cced8791354fc47e319d03f894f40d2ec528382b5643c3d51ec8e7.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-2e6e2b68efc28114f44616b68fcf1d4d9a4b83a8b42846d8373ea13b96d612cf.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-3941da180ee62a7c5d4e392ff4fe2d3a6ebb3657862b91e3ece34119f098fc2d.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-3c3abbf689fa64c6da7de69fd916769dbb04d3a61cf232892236c974660ffe64.json (100%) rename prover/{ => 
crates/lib}/prover_dal/.sqlx/query-3e0a1ebc684810c09ff83784bdd0ad195b0dd2a8ce56b1a9eb531103130b5e3e.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-412ef600a2f6025d8c22c2df8a497ed410fa47b268a66f1fc56d469c06ae50af.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-41af30620f8a1f20b8a6c46be162601d35fd2881ac1fd070f0f1a8add4bc388d.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-53f78fdee39b113d2f55f6f951bd94f28b7b2b60d551d552a9b0bab1f1791e39.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-542af2ff4259182310363ac0213592895215e22fd4cf0dfe69b83277f8d05db3.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-5db868e03dc6901a0afa06f82a37a1a04821495487a80595cc9b523dac6ac8e9.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-5e781f84ec41edd0941fa84de837effac442434c6e734d977e6682a7484abe7f.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-61b2b858d4636809c21838635aa52aeb5f06c26f68d131dd242f6ed68816c513.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-67f5f3a015dc478f02f4f701c90d0fc9ac9a7f3dce2ba48c2d0e6f38b6ba455a.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-6cfc59d2fc039c706f30ae91b7d9d0c658093dede5eb61489205aa751ad5b8ec.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-6f20d468efe916f8e92cbf259b37ac83cd32a628d3e01e5cd1949c519683a352.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-764693ceeb45f8478a20242b592d419667f11d80036cda021ecbf23b0b5f7f42.json (100%) rename prover/{ => 
crates/lib}/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-7effbacbdcc4bd762386351755f4f32042dfead8a37401558f5fd3b03480f2dd.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-806b82a9effd885ba537a2a1c7d7227120a8279db1875d26ccae5ee0785f46a9.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-860846c9bcad1edd1a2906542c178815e29440592b2bb00adacf02730b526458.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-8719c090a9ad2488d556e495238cdce6412e2725cf5162ce7a733f6dceaecb11.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-8720d411e0c9640afd61e927a89c0b6c018e6a4d279acd24a4ea7d81b5cc5123.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-87a73aa95a85efeb065428f9e56e085ea80cf93c2fd66fd3949aab428bbdc560.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-9b713312b539b4eefa58346f0070767a2cd4488e670674cd9149f7a332c0198d.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-a0f60a97f09b2467ca73bb6fbebb210d65149cdd4a3411a79b717aadbffb43af.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json (100%) rename prover/{ => 
crates/lib}/prover_dal/.sqlx/query-a94fffdbc1827dc5df908ea1e99ef3ad13840d2c497760e9bd0513f68dc4271c.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-abc93d27a8673b23e18d050e84c43c868c63c853edb5c4f41e48a3cc6378eca9.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-b321c5ba22358cbb1fd9c627f1e7b56187686173327498ac75424593547c19c5.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-b3d71dbe14bcd94131b29b64dcb49b6370c211a7fc24ad03a5f0e327f9d18040.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-b4794e6a0c2366d5d95ab373c310103263af3ff5cb6c9dc5df59d3cd2a5e56b4.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-b568f9cb9c2bd53b5dcde15f368a9dc31c7d51476f18cffa80cad653298ad252.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-bf06bd08d8ccf67fc00bbc364715263556f258565f79cbb40f5ecc1a4f6402f5.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-bfb80956a18eabf266f5b5a9d62912d57f8eb2a38bdb7884fc812a2897a3a660.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-c156004a0e5ad5bcc33d3b894fd69718349ac4fc08b455c7f4265d7443f2ec13.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-c173743af526d8150b6091ea52e6997fcfbc7ad688f2eee3dfab1029344d2382.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-c340c043c938bf5f4b63d57a1654775c6f7414c7bed75d33b61de00fdbabc349.json (100%) rename prover/{ => 
crates/lib}/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-ca9d06141265b8524ee28c55569cb21a635037d89ce24dd3ad58ffaadb59594a.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-caff620ae66d7fbe3caff7505173b6da86d1e693be03936730c340121167341f.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-d16278c6025eb3a205266fb5273f029e262be45614404159908af1624349700b.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-d4949debfe0dc5112204cd196c68b02c44b099e27e3c45c5c810cd5fcd8884ed.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-d91c931e2a14cf1183a608d041fc6fadb8e12a9218399d189b4d95e2ca4fcc48.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-e3194873d24e67f8d0e98bf8bf2d4f9a3b98458746972c9860fb9473947d59ff.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-e32c0d85cb2841efb0b7cea6b049bae42849574731d33539bfdcca21c9b64f4e.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-e743af4c18ec91eb46db5a19556fba74ec2cfc3c89c7e4e2ea475c3ce4092849.json (100%) rename prover/{ => 
crates/lib}/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-e9c9b69852fa68f463f17b6d63ab99cf505662036f2dd7a9f1807c4c1bad7c7b.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-ec04b89218111a5dc8d5ade506ac3465e2211ef3013386feb12d4cc04e0eade9.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-eef1b56e87eff63fcf6ffb98791583a7526ae38ceb4bf80543cfd3fb60492fb9.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-f99c34313e2717ec74b4f274e33dae905acac53b46eeaeb059d23e48a71df3b4.json (100%) rename prover/{ => crates/lib}/prover_dal/.sqlx/query-fcddeb96dcd1611dedb2091c1be304e8a35fd65bf37e976b7106f57c57e70b9b.json (100%) rename prover/{ => crates/lib}/prover_dal/Cargo.toml (100%) rename prover/{ => crates/lib}/prover_dal/doc/FriProofCompressorDal.md (100%) rename prover/{ => crates/lib}/prover_dal/doc/FriProverDal.md (100%) rename prover/{ => crates/lib}/prover_dal/doc/FriWitnessGeneratorDal.md (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240131134938_initial-prover-migration.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240131134938_initial-prover-migration.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240226120310_add_support_for_eip4844.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240226120310_add_support_for_eip4844.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.down.sql (100%) rename 
prover/{ => crates/lib}/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240703113903_add-vm_run_data-column.down.sql (100%) rename prover/{ => crates/lib}/prover_dal/migrations/20240703113903_add-vm_run_data-column.up.sql (100%) rename prover/{ => crates/lib}/prover_dal/src/fri_gpu_prover_queue_dal.rs (100%) rename prover/{ => crates/lib}/prover_dal/src/fri_proof_compressor_dal.rs (100%) rename prover/{ => crates/lib}/prover_dal/src/fri_protocol_versions_dal.rs (100%) rename prover/{ => crates/lib}/prover_dal/src/fri_prover_dal.rs (100%) rename prover/{ => crates/lib}/prover_dal/src/fri_witness_generator_dal.rs (100%) rename prover/{ => crates/lib}/prover_dal/src/lib.rs (100%) rename prover/{ => crates/lib}/prover_fri_types/Cargo.toml (100%) rename prover/{ => crates/lib}/prover_fri_types/README.md (100%) rename prover/{ => crates/lib}/prover_fri_types/src/keys.rs (100%) rename prover/{ => crates/lib}/prover_fri_types/src/lib.rs (100%) rename prover/{ => crates/lib}/prover_fri_types/src/queue.rs (100%) rename prover/{ => crates/lib}/prover_fri_utils/Cargo.toml (100%) rename prover/{ => crates/lib}/prover_fri_utils/src/lib.rs (100%) rename prover/{ => 
crates/lib}/prover_fri_utils/src/metrics.rs (100%) rename prover/{ => crates/lib}/prover_fri_utils/src/region_fetcher.rs (100%) rename prover/{ => crates/lib}/prover_fri_utils/src/socket_utils.rs (100%) diff --git a/.dockerignore b/.dockerignore index ee2e8af78dd3..c32286be6a01 100644 --- a/.dockerignore +++ b/.dockerignore @@ -39,7 +39,7 @@ contracts/.git !etc/multivm_bootloaders !cargo !bellman-cuda -!prover/vk_setup_data_generator_server_fri/data/ +!prover/crates/bin/vk_setup_data_generator_server_fri/data/ !.github/release-please/manifest.json !etc/env/file_based diff --git a/.gitignore b/.gitignore index 32ed5815b017..3ffddc7a7930 100644 --- a/.gitignore +++ b/.gitignore @@ -108,7 +108,7 @@ hyperchain-*.yml /etc/hyperchains/artifacts # Prover keys that should not be commited -prover/vk_setup_data_generator_server_fri/data/setup_* +prover/crates/bin/vk_setup_data_generator_server_fri/data/setup_* # Zk Toolbox chains/era/configs/* diff --git a/Cargo.toml b/Cargo.toml index aa77cf2f7cc1..0ce4be5c8431 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -79,7 +79,7 @@ members = [ "core/tests/vm-benchmark/harness", # Parts of prover workspace that are needed for Core workspace - "prover/prover_dal" + "prover/crates/lib/prover_dal" ] resolver = "2" @@ -222,7 +222,7 @@ zksync_protobuf_build = "=0.1.0-rc.2" # "Local" dependencies zksync_multivm = { version = "0.1.0", path = "core/lib/multivm" } -zksync_prover_dal = { version = "0.1.0", path = "prover/prover_dal" } +zksync_prover_dal = { version = "0.1.0", path = "prover/crates/lib/prover_dal" } zksync_vlog = { version = "0.1.0", path = "core/lib/vlog" } zksync_vm_utils = { version = "0.1.0", path = "core/lib/vm_utils" } zksync_vm_benchmark_harness = { version = "0.1.0", path = "core/tests/vm-benchmark/harness" } diff --git a/docker/local-node/Dockerfile b/docker/local-node/Dockerfile index c0592f89d563..2e6b09ef3d10 100644 --- a/docker/local-node/Dockerfile +++ b/docker/local-node/Dockerfile @@ -64,7 +64,7 @@ COPY package.json 
/ # Copy DAL - needed to setup database schemas. COPY core/lib/dal core/lib/dal -COPY prover/prover_dal prover/prover_dal +COPY prover/crates/lib/prover_dal prover/crates/lib/prover_dal RUN mkdir /etc/env/l1-inits && mkdir /etc/env/l2-inits diff --git a/docker/proof-fri-compressor/Dockerfile b/docker/proof-fri-compressor/Dockerfile index afa8477dcf72..2cf131abb4b8 100644 --- a/docker/proof-fri-compressor/Dockerfile +++ b/docker/proof-fri-compressor/Dockerfile @@ -14,7 +14,7 @@ FROM debian:bookworm-slim RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* # copy VK required for proof wrapping -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ # copy universal setup key required for proof compression COPY setup_2\^26.key /setup_2\^26.key diff --git a/docker/proof-fri-gpu-compressor/Dockerfile b/docker/proof-fri-gpu-compressor/Dockerfile index 8249f123081b..e6d2e0f11627 100644 --- a/docker/proof-fri-gpu-compressor/Dockerfile +++ b/docker/proof-fri-gpu-compressor/Dockerfile @@ -37,7 +37,7 @@ FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* # copy VK required for proof wrapping -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY setup_2\^24.key /setup_2\^24.key diff --git a/docker/prover-fri-gateway/Dockerfile b/docker/prover-fri-gateway/Dockerfile index f5dfa027b418..c53f27818687 100644 --- a/docker/prover-fri-gateway/Dockerfile +++ b/docker/prover-fri-gateway/Dockerfile @@ -11,7 +11,7 @@ FROM debian:bookworm-slim RUN apt-get update && apt-get install -y curl libpq5 
ca-certificates && rm -rf /var/lib/apt/lists/* # copy VK required for proof wrapping -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY --from=builder /usr/src/zksync/prover/target/release/zksync_prover_fri_gateway /usr/bin/ diff --git a/docker/prover-fri/Dockerfile b/docker/prover-fri/Dockerfile index 98a0d2d831dc..2dde8d9794ce 100644 --- a/docker/prover-fri/Dockerfile +++ b/docker/prover-fri/Dockerfile @@ -11,7 +11,7 @@ FROM debian:bookworm-slim RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* # copy VK required for protocol version -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY --from=builder /usr/src/zksync/prover/target/release/zksync_prover_fri /usr/bin/ diff --git a/docker/prover-gpu-fri-gar/Dockerfile b/docker/prover-gpu-fri-gar/Dockerfile index bd70be7ee4b4..248f6aaf35fe 100644 --- a/docker/prover-gpu-fri-gar/Dockerfile +++ b/docker/prover-gpu-fri-gar/Dockerfile @@ -9,7 +9,7 @@ COPY *.bin / RUN apt-get update && apt-get install -y libpq5 ca-certificates openssl && rm -rf /var/lib/apt/lists/* # copy finalization hints required for assembly generation -COPY --from=prover prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY --from=prover prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY --from=prover /usr/bin/zksync_prover_fri /usr/bin/ ENTRYPOINT ["zksync_prover_fri"] diff --git a/docker/prover-gpu-fri/Dockerfile b/docker/prover-gpu-fri/Dockerfile index 1093ed9e4ebf..0894c1c0c47d 100644 --- a/docker/prover-gpu-fri/Dockerfile 
+++ b/docker/prover-gpu-fri/Dockerfile @@ -31,7 +31,7 @@ FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* # copy finalization hints required for assembly generation -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY --from=builder /usr/src/zksync/prover/target/release/zksync_prover_fri /usr/bin/ diff --git a/docker/witness-generator/Dockerfile b/docker/witness-generator/Dockerfile index 595168702b70..3f8affbd2a9b 100644 --- a/docker/witness-generator/Dockerfile +++ b/docker/witness-generator/Dockerfile @@ -11,7 +11,7 @@ FROM debian:bookworm-slim RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY --from=builder /usr/src/zksync/prover/target/release/zksync_witness_generator /usr/bin/ diff --git a/docker/witness-vector-generator/Dockerfile b/docker/witness-vector-generator/Dockerfile index 9064595fcbed..d1bc1e29c5fa 100644 --- a/docker/witness-vector-generator/Dockerfile +++ b/docker/witness-vector-generator/Dockerfile @@ -12,7 +12,7 @@ FROM debian:bookworm-slim RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* # copy finalization hints required for witness vector generation -COPY prover/vk_setup_data_generator_server_fri/data/ /prover/vk_setup_data_generator_server_fri/data/ +COPY prover/crates/bin/vk_setup_data_generator_server_fri/data/ /prover/crates/bin/vk_setup_data_generator_server_fri/data/ COPY --from=builder 
/usr/src/zksync/prover/target/release/zksync_witness_vector_generator /usr/bin/ diff --git a/docs/guides/advanced/zk_intuition.md b/docs/guides/advanced/zk_intuition.md index e567ebf7ca82..6e0224a3237f 100644 --- a/docs/guides/advanced/zk_intuition.md +++ b/docs/guides/advanced/zk_intuition.md @@ -144,7 +144,8 @@ version 1.4.0. [bellman cuda repo]: https://github.com/matter-labs/era-bellman-cuda [example ecrecover circuit]: https://github.com/matter-labs/era-sync_vm/blob/v1.3.2/src/glue/ecrecover_circuit/mod.rs#L157 -[separate witness binary]: https://github.com/matter-labs/zksync-era/blob/main/prover/witness_generator/src/main.rs +[separate witness binary]: + https://github.com/matter-labs/zksync-era/blob/main/prover/crates/bin/witness_generator/src/main.rs [zkevm_test_harness witness]: https://github.com/matter-labs/era-zkevm_test_harness/blob/fb47657ae3b6ff6e4bb5199964d3d37212978200/src/external_calls.rs#L579 [heavy_ops_service repo]: https://github.com/matter-labs/era-heavy-ops-service diff --git a/infrastructure/zk/src/database.ts b/infrastructure/zk/src/database.ts index 2d11bca447d2..c818bd3be93f 100644 --- a/infrastructure/zk/src/database.ts +++ b/infrastructure/zk/src/database.ts @@ -10,7 +10,7 @@ export async function reset(opts: DbOpts) { export enum DalPath { CoreDal = 'core/lib/dal', - ProverDal = 'prover/prover_dal' + ProverDal = 'prover/crates/lib/prover_dal' } export interface DbOpts { diff --git a/infrastructure/zk/src/format_sql.ts b/infrastructure/zk/src/format_sql.ts index 7f18d4a46388..09f655f54867 100644 --- a/infrastructure/zk/src/format_sql.ts +++ b/infrastructure/zk/src/format_sql.ts @@ -159,7 +159,7 @@ async function formatFile(filePath: string, check: boolean) { export async function formatSqlxQueries(check: boolean) { process.chdir(`${process.env.ZKSYNC_HOME}`); const { stdout: filesRaw } = await utils.exec( - 'find core/lib/dal -type f -name "*.rs" && find prover/prover_dal -type f -name "*.rs"' + 'find core/lib/dal -type f -name "*.rs" 
&& find prover/crates/lib/prover_dal -type f -name "*.rs"' ); const files = filesRaw.trim().split('\n'); const formatResults = await Promise.all(files.map((file) => formatFile(file, check))); diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 6eebafbc520f..ffb034059c8a 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,17 +1,7 @@ [workspace] members = [ - # lib - "prover_fri_utils", - "prover_fri_types", - # binaries - "witness_generator", - "vk_setup_data_generator_server_fri", - "prover_fri", - "witness_vector_generator", - "prover_fri_gateway", - "proof_fri_compressor", - "prover_cli", - "prover_version", + "crates/bin/*", + "crates/lib/*", ] resolver = "2" @@ -94,10 +84,10 @@ zksync_contracts = { path = "../core/lib/contracts" } zksync_core_leftovers = { path = "../core/lib/zksync_core_leftovers" } # Prover workspace dependencies -zksync_prover_dal = { path = "prover_dal" } -zksync_prover_fri_types = { path = "prover_fri_types" } -zksync_prover_fri_utils = { path = "prover_fri_utils" } -vk_setup_data_generator_server_fri = { path = "vk_setup_data_generator_server_fri" } +zksync_prover_dal = { path = "crates/lib/prover_dal" } +zksync_prover_fri_types = { path = "crates/lib/prover_fri_types" } +zksync_prover_fri_utils = { path = "crates/lib/prover_fri_utils" } +vk_setup_data_generator_server_fri = { path = "crates/bin/vk_setup_data_generator_server_fri" } # for `perf` profiling [profile.perf] diff --git a/prover/proof_fri_compressor/Cargo.toml b/prover/crates/bin/proof_fri_compressor/Cargo.toml similarity index 100% rename from prover/proof_fri_compressor/Cargo.toml rename to prover/crates/bin/proof_fri_compressor/Cargo.toml diff --git a/prover/proof_fri_compressor/README.md b/prover/crates/bin/proof_fri_compressor/README.md similarity index 100% rename from prover/proof_fri_compressor/README.md rename to prover/crates/bin/proof_fri_compressor/README.md diff --git a/prover/proof_fri_compressor/src/compressor.rs 
b/prover/crates/bin/proof_fri_compressor/src/compressor.rs similarity index 100% rename from prover/proof_fri_compressor/src/compressor.rs rename to prover/crates/bin/proof_fri_compressor/src/compressor.rs diff --git a/prover/proof_fri_compressor/src/initial_setup_keys.rs b/prover/crates/bin/proof_fri_compressor/src/initial_setup_keys.rs similarity index 100% rename from prover/proof_fri_compressor/src/initial_setup_keys.rs rename to prover/crates/bin/proof_fri_compressor/src/initial_setup_keys.rs diff --git a/prover/proof_fri_compressor/src/main.rs b/prover/crates/bin/proof_fri_compressor/src/main.rs similarity index 100% rename from prover/proof_fri_compressor/src/main.rs rename to prover/crates/bin/proof_fri_compressor/src/main.rs diff --git a/prover/proof_fri_compressor/src/metrics.rs b/prover/crates/bin/proof_fri_compressor/src/metrics.rs similarity index 100% rename from prover/proof_fri_compressor/src/metrics.rs rename to prover/crates/bin/proof_fri_compressor/src/metrics.rs diff --git a/prover/prover_cli/Cargo.toml b/prover/crates/bin/prover_cli/Cargo.toml similarity index 100% rename from prover/prover_cli/Cargo.toml rename to prover/crates/bin/prover_cli/Cargo.toml diff --git a/prover/prover_cli/README.md b/prover/crates/bin/prover_cli/README.md similarity index 99% rename from prover/prover_cli/README.md rename to prover/crates/bin/prover_cli/README.md index 053744914b97..6a9091aef25e 100644 --- a/prover/prover_cli/README.md +++ b/prover/crates/bin/prover_cli/README.md @@ -6,7 +6,7 @@ CLI tool for performing maintenance of a ZKsync Prover ``` git clone git@github.com:matter-labs/zksync-era.git -cargo install --path prover/prover_cli/ +cargo install -p prover_cli ``` > This should be `cargo install zksync-prover-cli` or something similar ideally. 
diff --git a/prover/prover_cli/src/cli.rs b/prover/crates/bin/prover_cli/src/cli.rs similarity index 100% rename from prover/prover_cli/src/cli.rs rename to prover/crates/bin/prover_cli/src/cli.rs diff --git a/prover/prover_cli/src/commands/config.rs b/prover/crates/bin/prover_cli/src/commands/config.rs similarity index 100% rename from prover/prover_cli/src/commands/config.rs rename to prover/crates/bin/prover_cli/src/commands/config.rs diff --git a/prover/prover_cli/src/commands/debug_proof.rs b/prover/crates/bin/prover_cli/src/commands/debug_proof.rs similarity index 100% rename from prover/prover_cli/src/commands/debug_proof.rs rename to prover/crates/bin/prover_cli/src/commands/debug_proof.rs diff --git a/prover/prover_cli/src/commands/delete.rs b/prover/crates/bin/prover_cli/src/commands/delete.rs similarity index 100% rename from prover/prover_cli/src/commands/delete.rs rename to prover/crates/bin/prover_cli/src/commands/delete.rs diff --git a/prover/prover_cli/src/commands/get_file_info.rs b/prover/crates/bin/prover_cli/src/commands/get_file_info.rs similarity index 100% rename from prover/prover_cli/src/commands/get_file_info.rs rename to prover/crates/bin/prover_cli/src/commands/get_file_info.rs diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/crates/bin/prover_cli/src/commands/mod.rs similarity index 100% rename from prover/prover_cli/src/commands/mod.rs rename to prover/crates/bin/prover_cli/src/commands/mod.rs diff --git a/prover/prover_cli/src/commands/requeue.rs b/prover/crates/bin/prover_cli/src/commands/requeue.rs similarity index 100% rename from prover/prover_cli/src/commands/requeue.rs rename to prover/crates/bin/prover_cli/src/commands/requeue.rs diff --git a/prover/prover_cli/src/commands/restart.rs b/prover/crates/bin/prover_cli/src/commands/restart.rs similarity index 100% rename from prover/prover_cli/src/commands/restart.rs rename to prover/crates/bin/prover_cli/src/commands/restart.rs diff --git 
a/prover/prover_cli/src/commands/stats.rs b/prover/crates/bin/prover_cli/src/commands/stats.rs similarity index 100% rename from prover/prover_cli/src/commands/stats.rs rename to prover/crates/bin/prover_cli/src/commands/stats.rs diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/crates/bin/prover_cli/src/commands/status/batch.rs similarity index 100% rename from prover/prover_cli/src/commands/status/batch.rs rename to prover/crates/bin/prover_cli/src/commands/status/batch.rs diff --git a/prover/prover_cli/src/commands/status/l1.rs b/prover/crates/bin/prover_cli/src/commands/status/l1.rs similarity index 100% rename from prover/prover_cli/src/commands/status/l1.rs rename to prover/crates/bin/prover_cli/src/commands/status/l1.rs diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/crates/bin/prover_cli/src/commands/status/mod.rs similarity index 100% rename from prover/prover_cli/src/commands/status/mod.rs rename to prover/crates/bin/prover_cli/src/commands/status/mod.rs diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/crates/bin/prover_cli/src/commands/status/utils.rs similarity index 100% rename from prover/prover_cli/src/commands/status/utils.rs rename to prover/crates/bin/prover_cli/src/commands/status/utils.rs diff --git a/prover/prover_cli/src/config/mod.rs b/prover/crates/bin/prover_cli/src/config/mod.rs similarity index 100% rename from prover/prover_cli/src/config/mod.rs rename to prover/crates/bin/prover_cli/src/config/mod.rs diff --git a/prover/prover_cli/src/examples/pliconfig b/prover/crates/bin/prover_cli/src/examples/pliconfig similarity index 100% rename from prover/prover_cli/src/examples/pliconfig rename to prover/crates/bin/prover_cli/src/examples/pliconfig diff --git a/prover/prover_cli/src/helper.rs b/prover/crates/bin/prover_cli/src/helper.rs similarity index 100% rename from prover/prover_cli/src/helper.rs rename to prover/crates/bin/prover_cli/src/helper.rs diff --git 
a/prover/prover_cli/src/lib.rs b/prover/crates/bin/prover_cli/src/lib.rs similarity index 100% rename from prover/prover_cli/src/lib.rs rename to prover/crates/bin/prover_cli/src/lib.rs diff --git a/prover/prover_cli/src/main.rs b/prover/crates/bin/prover_cli/src/main.rs similarity index 100% rename from prover/prover_cli/src/main.rs rename to prover/crates/bin/prover_cli/src/main.rs diff --git a/prover/prover_fri/Cargo.toml b/prover/crates/bin/prover_fri/Cargo.toml similarity index 100% rename from prover/prover_fri/Cargo.toml rename to prover/crates/bin/prover_fri/Cargo.toml diff --git a/prover/prover_fri/README.md b/prover/crates/bin/prover_fri/README.md similarity index 100% rename from prover/prover_fri/README.md rename to prover/crates/bin/prover_fri/README.md diff --git a/prover/prover_fri/src/gpu_prover_availability_checker.rs b/prover/crates/bin/prover_fri/src/gpu_prover_availability_checker.rs similarity index 100% rename from prover/prover_fri/src/gpu_prover_availability_checker.rs rename to prover/crates/bin/prover_fri/src/gpu_prover_availability_checker.rs diff --git a/prover/prover_fri/src/gpu_prover_job_processor.rs b/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs similarity index 100% rename from prover/prover_fri/src/gpu_prover_job_processor.rs rename to prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs diff --git a/prover/prover_fri/src/lib.rs b/prover/crates/bin/prover_fri/src/lib.rs similarity index 100% rename from prover/prover_fri/src/lib.rs rename to prover/crates/bin/prover_fri/src/lib.rs diff --git a/prover/prover_fri/src/main.rs b/prover/crates/bin/prover_fri/src/main.rs similarity index 100% rename from prover/prover_fri/src/main.rs rename to prover/crates/bin/prover_fri/src/main.rs diff --git a/prover/prover_fri/src/metrics.rs b/prover/crates/bin/prover_fri/src/metrics.rs similarity index 100% rename from prover/prover_fri/src/metrics.rs rename to prover/crates/bin/prover_fri/src/metrics.rs diff --git 
a/prover/prover_fri/src/prover_job_processor.rs b/prover/crates/bin/prover_fri/src/prover_job_processor.rs similarity index 100% rename from prover/prover_fri/src/prover_job_processor.rs rename to prover/crates/bin/prover_fri/src/prover_job_processor.rs diff --git a/prover/prover_fri/src/socket_listener.rs b/prover/crates/bin/prover_fri/src/socket_listener.rs similarity index 100% rename from prover/prover_fri/src/socket_listener.rs rename to prover/crates/bin/prover_fri/src/socket_listener.rs diff --git a/prover/prover_fri/src/utils.rs b/prover/crates/bin/prover_fri/src/utils.rs similarity index 100% rename from prover/prover_fri/src/utils.rs rename to prover/crates/bin/prover_fri/src/utils.rs diff --git a/prover/prover_fri/tests/basic_test.rs b/prover/crates/bin/prover_fri/tests/basic_test.rs similarity index 100% rename from prover/prover_fri/tests/basic_test.rs rename to prover/crates/bin/prover_fri/tests/basic_test.rs diff --git a/prover/prover_fri/tests/data/proofs_fri/proof_1293714.bin b/prover/crates/bin/prover_fri/tests/data/proofs_fri/proof_1293714.bin similarity index 100% rename from prover/prover_fri/tests/data/proofs_fri/proof_1293714.bin rename to prover/crates/bin/prover_fri/tests/data/proofs_fri/proof_1293714.bin diff --git a/prover/prover_fri/tests/data/proofs_fri/proof_5176866.bin b/prover/crates/bin/prover_fri/tests/data/proofs_fri/proof_5176866.bin similarity index 100% rename from prover/prover_fri/tests/data/proofs_fri/proof_5176866.bin rename to prover/crates/bin/prover_fri/tests/data/proofs_fri/proof_5176866.bin diff --git a/prover/prover_fri/tests/data/prover_jobs_fri/114499_479_6_BasicCircuits_0.bin b/prover/crates/bin/prover_fri/tests/data/prover_jobs_fri/114499_479_6_BasicCircuits_0.bin similarity index 100% rename from prover/prover_fri/tests/data/prover_jobs_fri/114499_479_6_BasicCircuits_0.bin rename to prover/crates/bin/prover_fri/tests/data/prover_jobs_fri/114499_479_6_BasicCircuits_0.bin diff --git 
a/prover/prover_fri/tests/data/prover_jobs_fri/128623_1086_1_BasicCircuits_0.bin b/prover/crates/bin/prover_fri/tests/data/prover_jobs_fri/128623_1086_1_BasicCircuits_0.bin similarity index 100% rename from prover/prover_fri/tests/data/prover_jobs_fri/128623_1086_1_BasicCircuits_0.bin rename to prover/crates/bin/prover_fri/tests/data/prover_jobs_fri/128623_1086_1_BasicCircuits_0.bin diff --git a/prover/prover_fri_gateway/Cargo.toml b/prover/crates/bin/prover_fri_gateway/Cargo.toml similarity index 100% rename from prover/prover_fri_gateway/Cargo.toml rename to prover/crates/bin/prover_fri_gateway/Cargo.toml diff --git a/prover/prover_fri_gateway/README.md b/prover/crates/bin/prover_fri_gateway/README.md similarity index 100% rename from prover/prover_fri_gateway/README.md rename to prover/crates/bin/prover_fri_gateway/README.md diff --git a/prover/prover_fri_gateway/src/client.rs b/prover/crates/bin/prover_fri_gateway/src/client.rs similarity index 100% rename from prover/prover_fri_gateway/src/client.rs rename to prover/crates/bin/prover_fri_gateway/src/client.rs diff --git a/prover/prover_fri_gateway/src/main.rs b/prover/crates/bin/prover_fri_gateway/src/main.rs similarity index 100% rename from prover/prover_fri_gateway/src/main.rs rename to prover/crates/bin/prover_fri_gateway/src/main.rs diff --git a/prover/prover_fri_gateway/src/metrics.rs b/prover/crates/bin/prover_fri_gateway/src/metrics.rs similarity index 100% rename from prover/prover_fri_gateway/src/metrics.rs rename to prover/crates/bin/prover_fri_gateway/src/metrics.rs diff --git a/prover/prover_fri_gateway/src/proof_gen_data_fetcher.rs b/prover/crates/bin/prover_fri_gateway/src/proof_gen_data_fetcher.rs similarity index 100% rename from prover/prover_fri_gateway/src/proof_gen_data_fetcher.rs rename to prover/crates/bin/prover_fri_gateway/src/proof_gen_data_fetcher.rs diff --git a/prover/prover_fri_gateway/src/proof_submitter.rs b/prover/crates/bin/prover_fri_gateway/src/proof_submitter.rs similarity 
index 100% rename from prover/prover_fri_gateway/src/proof_submitter.rs rename to prover/crates/bin/prover_fri_gateway/src/proof_submitter.rs diff --git a/prover/prover_fri_gateway/src/traits.rs b/prover/crates/bin/prover_fri_gateway/src/traits.rs similarity index 100% rename from prover/prover_fri_gateway/src/traits.rs rename to prover/crates/bin/prover_fri_gateway/src/traits.rs diff --git a/prover/prover_version/Cargo.toml b/prover/crates/bin/prover_version/Cargo.toml similarity index 100% rename from prover/prover_version/Cargo.toml rename to prover/crates/bin/prover_version/Cargo.toml diff --git a/prover/prover_version/src/main.rs b/prover/crates/bin/prover_version/src/main.rs similarity index 100% rename from prover/prover_version/src/main.rs rename to prover/crates/bin/prover_version/src/main.rs diff --git a/prover/vk_setup_data_generator_server_fri/Cargo.toml b/prover/crates/bin/vk_setup_data_generator_server_fri/Cargo.toml similarity index 100% rename from prover/vk_setup_data_generator_server_fri/Cargo.toml rename to prover/crates/bin/vk_setup_data_generator_server_fri/Cargo.toml diff --git a/prover/vk_setup_data_generator_server_fri/README.md b/prover/crates/bin/vk_setup_data_generator_server_fri/README.md similarity index 100% rename from prover/vk_setup_data_generator_server_fri/README.md rename to prover/crates/bin/vk_setup_data_generator_server_fri/README.md diff --git a/prover/vk_setup_data_generator_server_fri/data/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_1.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_1.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_1.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_1.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_10.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_10.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_10.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_10.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_11.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_11.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_11.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_11.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_12.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_12.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_12.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_12.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_13.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_13.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_13.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_13.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_14.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_14.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_14.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_14.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_15.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_15.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_15.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_15.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_2.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_2.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_2.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_2.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_255.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_255.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_255.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_255.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_3.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_3.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_3.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_3.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_4.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_4.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_4.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_4.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_5.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_5.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_5.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_5.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_6.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_6.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_6.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_6.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_7.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_7.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_7.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_7.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_8.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_8.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_8.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_8.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_9.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_9.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_basic_9.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_basic_9.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_10.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_10.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_10.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_10.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_11.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_11.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_11.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_11.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_12.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_12.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_12.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_12.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_13.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_13.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_13.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_13.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_14.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_14.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_14.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_14.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_15.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_15.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_15.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_15.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_16.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_16.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_16.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_16.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_17.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_17.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_17.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_17.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_18.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_18.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_18.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_18.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_3.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_3.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_3.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_3.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_4.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_4.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_4.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_4.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_5.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_5.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_5.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_5.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_6.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_6.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_6.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_6.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_7.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_7.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_7.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_7.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_8.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_8.bin similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_8.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_8.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_9.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_9.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_9.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_leaf_9.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_node.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_node.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_node.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_node.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_recursion_tip.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_recursion_tip.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_recursion_tip.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_recursion_tip.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/finalization_hints_scheduler.bin b/prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_scheduler.bin similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/finalization_hints_scheduler.bin rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/finalization_hints_scheduler.bin diff --git a/prover/vk_setup_data_generator_server_fri/data/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/snark_verification_scheduler_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_10_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_10_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_10_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_10_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_11_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_11_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_11_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_11_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_12_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_12_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_12_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_12_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_13_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_13_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_13_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_13_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_14_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_14_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_basic_14_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_14_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_15_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_15_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_15_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_15_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_1_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_1_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_1_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_1_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_255_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_255_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_255_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_255_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_2_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_2_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_2_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_2_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_3_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_3_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_basic_3_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_3_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_4_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_4_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_4_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_4_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_5_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_5_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_5_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_5_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_6_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_6_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_6_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_6_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_7_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_7_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_7_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_7_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_8_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_8_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_basic_8_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_8_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_basic_9_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_9_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_basic_9_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_basic_9_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_10_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_10_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_10_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_10_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_11_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_11_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_11_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_11_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_12_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_12_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_12_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_12_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_13_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_13_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_leaf_13_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_13_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_14_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_14_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_14_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_14_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_15_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_15_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_15_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_15_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_16_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_16_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_16_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_16_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_17_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_17_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_17_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_17_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_18_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_18_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_leaf_18_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_18_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_3_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_3_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_3_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_3_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_4_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_4_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_4_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_4_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_5_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_5_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_5_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_5_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_6_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_6_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_6_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_6_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_7_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_7_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_leaf_7_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_7_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_8_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_8_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_8_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_8_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_leaf_9_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_9_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_leaf_9_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_leaf_9_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_node_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_node_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_node_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_node_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_recursion_tip_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_recursion_tip_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/data/verification_recursion_tip_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_recursion_tip_key.json diff --git a/prover/vk_setup_data_generator_server_fri/data/verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_scheduler_key.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/data/verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/data/verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/0.24.0/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.0/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/0.24.0/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.0/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/0.24.0/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.0/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/0.24.0/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.0/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/0.24.1/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.1/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/0.24.1/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.1/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/0.24.1/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.1/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/0.24.1/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/0.24.1/snark_verification_scheduler_key.json diff --git 
a/prover/vk_setup_data_generator_server_fri/historical_data/18/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/18/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/18/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/18/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/18/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/18/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/18/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/18/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/19/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/19/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/19/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/19/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/19/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/19/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/19/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/19/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/20/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/20/commitments.json similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/historical_data/20/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/20/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/20/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/20/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/20/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/20/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/21/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/21/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/21/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/21/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/21/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/21/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/21/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/21/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/22/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/22/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/22/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/22/commitments.json diff --git 
a/prover/vk_setup_data_generator_server_fri/historical_data/22/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/22/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/22/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/22/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/23/commitments.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/23/commitments.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/23/commitments.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/23/commitments.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/23/snark_verification_scheduler_key.json b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/23/snark_verification_scheduler_key.json similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/23/snark_verification_scheduler_key.json rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/23/snark_verification_scheduler_key.json diff --git a/prover/vk_setup_data_generator_server_fri/historical_data/README.md b/prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/README.md similarity index 100% rename from prover/vk_setup_data_generator_server_fri/historical_data/README.md rename to prover/crates/bin/vk_setup_data_generator_server_fri/historical_data/README.md diff --git a/prover/vk_setup_data_generator_server_fri/src/commitment_generator.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/commitment_generator.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/commitment_generator.rs rename to 
prover/crates/bin/vk_setup_data_generator_server_fri/src/commitment_generator.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/commitment_utils.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/commitment_utils.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/commitment_utils.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/commitment_utils.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/keystore.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/keystore.rs similarity index 99% rename from prover/vk_setup_data_generator_server_fri/src/keystore.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/keystore.rs index 70aaff9fc4a4..e886b5d1b0c0 100644 --- a/prover/vk_setup_data_generator_server_fri/src/keystore.rs +++ b/prover/crates/bin/vk_setup_data_generator_server_fri/src/keystore.rs @@ -47,7 +47,7 @@ pub struct Keystore { fn get_base_path() -> PathBuf { let path = core_workspace_dir_or_current_dir(); - let new_path = path.join("prover/vk_setup_data_generator_server_fri/data"); + let new_path = path.join("prover/crates/bin/vk_setup_data_generator_server_fri/data"); if new_path.exists() { return new_path; } @@ -56,7 +56,7 @@ fn get_base_path() -> PathBuf { components.next_back().unwrap(); components .as_path() - .join("prover/vk_setup_data_generator_server_fri/data") + .join("prover/crates/bin/vk_setup_data_generator_server_fri/data") } impl Default for Keystore { diff --git a/prover/vk_setup_data_generator_server_fri/src/lib.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/lib.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/lib.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/lib.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/main.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/main.rs similarity index 100% rename from 
prover/vk_setup_data_generator_server_fri/src/main.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/main.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/setup_data_generator.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/setup_data_generator.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/setup_data_generator.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/setup_data_generator.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/tests.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/tests.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/tests.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/tests.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/utils.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/utils.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs diff --git a/prover/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs similarity index 100% rename from prover/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs rename to prover/crates/bin/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs diff --git a/prover/witness_generator/Cargo.toml b/prover/crates/bin/witness_generator/Cargo.toml similarity index 100% rename from prover/witness_generator/Cargo.toml rename to prover/crates/bin/witness_generator/Cargo.toml diff --git a/prover/witness_generator/README.md b/prover/crates/bin/witness_generator/README.md similarity index 100% rename from prover/witness_generator/README.md rename to prover/crates/bin/witness_generator/README.md diff --git a/prover/witness_generator/src/basic_circuits.rs 
b/prover/crates/bin/witness_generator/src/basic_circuits.rs similarity index 100% rename from prover/witness_generator/src/basic_circuits.rs rename to prover/crates/bin/witness_generator/src/basic_circuits.rs diff --git a/prover/witness_generator/src/leaf_aggregation.rs b/prover/crates/bin/witness_generator/src/leaf_aggregation.rs similarity index 100% rename from prover/witness_generator/src/leaf_aggregation.rs rename to prover/crates/bin/witness_generator/src/leaf_aggregation.rs diff --git a/prover/witness_generator/src/lib.rs b/prover/crates/bin/witness_generator/src/lib.rs similarity index 100% rename from prover/witness_generator/src/lib.rs rename to prover/crates/bin/witness_generator/src/lib.rs diff --git a/prover/witness_generator/src/main.rs b/prover/crates/bin/witness_generator/src/main.rs similarity index 100% rename from prover/witness_generator/src/main.rs rename to prover/crates/bin/witness_generator/src/main.rs diff --git a/prover/witness_generator/src/metrics.rs b/prover/crates/bin/witness_generator/src/metrics.rs similarity index 100% rename from prover/witness_generator/src/metrics.rs rename to prover/crates/bin/witness_generator/src/metrics.rs diff --git a/prover/witness_generator/src/node_aggregation.rs b/prover/crates/bin/witness_generator/src/node_aggregation.rs similarity index 100% rename from prover/witness_generator/src/node_aggregation.rs rename to prover/crates/bin/witness_generator/src/node_aggregation.rs diff --git a/prover/witness_generator/src/precalculated_merkle_paths_provider.rs b/prover/crates/bin/witness_generator/src/precalculated_merkle_paths_provider.rs similarity index 100% rename from prover/witness_generator/src/precalculated_merkle_paths_provider.rs rename to prover/crates/bin/witness_generator/src/precalculated_merkle_paths_provider.rs diff --git a/prover/witness_generator/src/recursion_tip.rs b/prover/crates/bin/witness_generator/src/recursion_tip.rs similarity index 100% rename from 
prover/witness_generator/src/recursion_tip.rs rename to prover/crates/bin/witness_generator/src/recursion_tip.rs diff --git a/prover/witness_generator/src/scheduler.rs b/prover/crates/bin/witness_generator/src/scheduler.rs similarity index 100% rename from prover/witness_generator/src/scheduler.rs rename to prover/crates/bin/witness_generator/src/scheduler.rs diff --git a/prover/witness_generator/src/storage_oracle.rs b/prover/crates/bin/witness_generator/src/storage_oracle.rs similarity index 100% rename from prover/witness_generator/src/storage_oracle.rs rename to prover/crates/bin/witness_generator/src/storage_oracle.rs diff --git a/prover/witness_generator/src/tests.rs b/prover/crates/bin/witness_generator/src/tests.rs similarity index 100% rename from prover/witness_generator/src/tests.rs rename to prover/crates/bin/witness_generator/src/tests.rs diff --git a/prover/witness_generator/src/trusted_setup.json b/prover/crates/bin/witness_generator/src/trusted_setup.json similarity index 100% rename from prover/witness_generator/src/trusted_setup.json rename to prover/crates/bin/witness_generator/src/trusted_setup.json diff --git a/prover/witness_generator/src/utils.rs b/prover/crates/bin/witness_generator/src/utils.rs similarity index 100% rename from prover/witness_generator/src/utils.rs rename to prover/crates/bin/witness_generator/src/utils.rs diff --git a/prover/witness_generator/tests/basic_test.rs b/prover/crates/bin/witness_generator/tests/basic_test.rs similarity index 100% rename from prover/witness_generator/tests/basic_test.rs rename to prover/crates/bin/witness_generator/tests/basic_test.rs diff --git a/prover/witness_generator/tests/data/leaf/leaf_aggregation_witness_jobs_fri/closed_form_inputs_125010_4.bin b/prover/crates/bin/witness_generator/tests/data/leaf/leaf_aggregation_witness_jobs_fri/closed_form_inputs_125010_4.bin similarity index 100% rename from 
prover/witness_generator/tests/data/leaf/leaf_aggregation_witness_jobs_fri/closed_form_inputs_125010_4.bin rename to prover/crates/bin/witness_generator/tests/data/leaf/leaf_aggregation_witness_jobs_fri/closed_form_inputs_125010_4.bin diff --git a/prover/witness_generator/tests/data/leaf/node_aggregation_witness_jobs_fri/aggregations_125010_6_0.bin b/prover/crates/bin/witness_generator/tests/data/leaf/node_aggregation_witness_jobs_fri/aggregations_125010_6_0.bin similarity index 100% rename from prover/witness_generator/tests/data/leaf/node_aggregation_witness_jobs_fri/aggregations_125010_6_0.bin rename to prover/crates/bin/witness_generator/tests/data/leaf/node_aggregation_witness_jobs_fri/aggregations_125010_6_0.bin diff --git a/prover/witness_generator/tests/data/leaf/proofs_fri/proof_4639043.bin b/prover/crates/bin/witness_generator/tests/data/leaf/proofs_fri/proof_4639043.bin similarity index 100% rename from prover/witness_generator/tests/data/leaf/proofs_fri/proof_4639043.bin rename to prover/crates/bin/witness_generator/tests/data/leaf/proofs_fri/proof_4639043.bin diff --git a/prover/witness_generator/tests/data/leaf/proofs_fri/proof_4639044.bin b/prover/crates/bin/witness_generator/tests/data/leaf/proofs_fri/proof_4639044.bin similarity index 100% rename from prover/witness_generator/tests/data/leaf/proofs_fri/proof_4639044.bin rename to prover/crates/bin/witness_generator/tests/data/leaf/proofs_fri/proof_4639044.bin diff --git a/prover/witness_generator/tests/data/leaf/proofs_fri/proof_4639045.bin b/prover/crates/bin/witness_generator/tests/data/leaf/proofs_fri/proof_4639045.bin similarity index 100% rename from prover/witness_generator/tests/data/leaf/proofs_fri/proof_4639045.bin rename to prover/crates/bin/witness_generator/tests/data/leaf/proofs_fri/proof_4639045.bin diff --git a/prover/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_0.bin 
b/prover/crates/bin/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_0.bin similarity index 100% rename from prover/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_0.bin rename to prover/crates/bin/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_0.bin diff --git a/prover/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_1.bin b/prover/crates/bin/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_1.bin similarity index 100% rename from prover/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_1.bin rename to prover/crates/bin/witness_generator/tests/data/node/node_aggregation_witness_jobs_fri/aggregations_127856_8_1.bin diff --git a/prover/witness_generator/tests/data/node/proofs_fri/proof_5211320.bin b/prover/crates/bin/witness_generator/tests/data/node/proofs_fri/proof_5211320.bin similarity index 100% rename from prover/witness_generator/tests/data/node/proofs_fri/proof_5211320.bin rename to prover/crates/bin/witness_generator/tests/data/node/proofs_fri/proof_5211320.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627082.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627082.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627082.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627082.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627083.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627083.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627083.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627083.bin diff 
--git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627084.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627084.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627084.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627084.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627085.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627085.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627085.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627085.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627086.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627086.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627086.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627086.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627090.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627090.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627090.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627090.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627091.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627091.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627091.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627091.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627092.bin 
b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627092.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627092.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627092.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627093.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627093.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627093.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627093.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627094.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627094.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5627094.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5627094.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5629097.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5629097.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5629097.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5629097.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5631320.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5631320.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5631320.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5631320.bin diff --git a/prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5639969.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5639969.bin 
similarity index 100% rename from prover/witness_generator/tests/data/scheduler/proofs_fri/proof_5639969.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/proofs_fri/proof_5639969.bin diff --git a/prover/witness_generator/tests/data/scheduler/prover_jobs_fri/128599_0_1_Scheduler_0.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/prover_jobs_fri/128599_0_1_Scheduler_0.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/prover_jobs_fri/128599_0_1_Scheduler_0.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/prover_jobs_fri/128599_0_1_Scheduler_0.bin diff --git a/prover/witness_generator/tests/data/scheduler/scheduler_witness_jobs_fri/scheduler_witness_128599.bin b/prover/crates/bin/witness_generator/tests/data/scheduler/scheduler_witness_jobs_fri/scheduler_witness_128599.bin similarity index 100% rename from prover/witness_generator/tests/data/scheduler/scheduler_witness_jobs_fri/scheduler_witness_128599.bin rename to prover/crates/bin/witness_generator/tests/data/scheduler/scheduler_witness_jobs_fri/scheduler_witness_128599.bin diff --git a/prover/witness_vector_generator/Cargo.toml b/prover/crates/bin/witness_vector_generator/Cargo.toml similarity index 100% rename from prover/witness_vector_generator/Cargo.toml rename to prover/crates/bin/witness_vector_generator/Cargo.toml diff --git a/prover/witness_vector_generator/README.md b/prover/crates/bin/witness_vector_generator/README.md similarity index 100% rename from prover/witness_vector_generator/README.md rename to prover/crates/bin/witness_vector_generator/README.md diff --git a/prover/witness_vector_generator/src/generator.rs b/prover/crates/bin/witness_vector_generator/src/generator.rs similarity index 100% rename from prover/witness_vector_generator/src/generator.rs rename to prover/crates/bin/witness_vector_generator/src/generator.rs diff --git a/prover/witness_vector_generator/src/lib.rs 
b/prover/crates/bin/witness_vector_generator/src/lib.rs similarity index 100% rename from prover/witness_vector_generator/src/lib.rs rename to prover/crates/bin/witness_vector_generator/src/lib.rs diff --git a/prover/witness_vector_generator/src/main.rs b/prover/crates/bin/witness_vector_generator/src/main.rs similarity index 100% rename from prover/witness_vector_generator/src/main.rs rename to prover/crates/bin/witness_vector_generator/src/main.rs diff --git a/prover/witness_vector_generator/src/metrics.rs b/prover/crates/bin/witness_vector_generator/src/metrics.rs similarity index 100% rename from prover/witness_vector_generator/src/metrics.rs rename to prover/crates/bin/witness_vector_generator/src/metrics.rs diff --git a/prover/witness_vector_generator/tests/basic_test.rs b/prover/crates/bin/witness_vector_generator/tests/basic_test.rs similarity index 100% rename from prover/witness_vector_generator/tests/basic_test.rs rename to prover/crates/bin/witness_vector_generator/tests/basic_test.rs diff --git a/prover/witness_vector_generator/tests/data/base_layer_main_vm.bin b/prover/crates/bin/witness_vector_generator/tests/data/base_layer_main_vm.bin similarity index 100% rename from prover/witness_vector_generator/tests/data/base_layer_main_vm.bin rename to prover/crates/bin/witness_vector_generator/tests/data/base_layer_main_vm.bin diff --git a/prover/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json b/prover/crates/lib/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json similarity index 100% rename from prover/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json rename to prover/crates/lib/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json diff --git a/prover/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json 
b/prover/crates/lib/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json similarity index 100% rename from prover/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json rename to prover/crates/lib/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json diff --git a/prover/prover_dal/.sqlx/query-069f04bdfafbe2e3628ac3ded93dab9b63eee7f21c450a723e4ba011edc8e2bb.json b/prover/crates/lib/prover_dal/.sqlx/query-069f04bdfafbe2e3628ac3ded93dab9b63eee7f21c450a723e4ba011edc8e2bb.json similarity index 100% rename from prover/prover_dal/.sqlx/query-069f04bdfafbe2e3628ac3ded93dab9b63eee7f21c450a723e4ba011edc8e2bb.json rename to prover/crates/lib/prover_dal/.sqlx/query-069f04bdfafbe2e3628ac3ded93dab9b63eee7f21c450a723e4ba011edc8e2bb.json diff --git a/prover/prover_dal/.sqlx/query-081e2b928f0816c41d6645c1dedbb3402044d201e85e114ff4582394c32bd2bf.json b/prover/crates/lib/prover_dal/.sqlx/query-081e2b928f0816c41d6645c1dedbb3402044d201e85e114ff4582394c32bd2bf.json similarity index 100% rename from prover/prover_dal/.sqlx/query-081e2b928f0816c41d6645c1dedbb3402044d201e85e114ff4582394c32bd2bf.json rename to prover/crates/lib/prover_dal/.sqlx/query-081e2b928f0816c41d6645c1dedbb3402044d201e85e114ff4582394c32bd2bf.json diff --git a/prover/prover_dal/.sqlx/query-0a1ec4690d6b4a67d6ad16badcbf113a19feb73c4cf9876855523499998b99c0.json b/prover/crates/lib/prover_dal/.sqlx/query-0a1ec4690d6b4a67d6ad16badcbf113a19feb73c4cf9876855523499998b99c0.json similarity index 100% rename from prover/prover_dal/.sqlx/query-0a1ec4690d6b4a67d6ad16badcbf113a19feb73c4cf9876855523499998b99c0.json rename to prover/crates/lib/prover_dal/.sqlx/query-0a1ec4690d6b4a67d6ad16badcbf113a19feb73c4cf9876855523499998b99c0.json diff --git a/prover/prover_dal/.sqlx/query-0b70c98c2edd8370ad09ac553c18dbc21cccb9a95e3db1c93da239845a5e9036.json 
b/prover/crates/lib/prover_dal/.sqlx/query-0b70c98c2edd8370ad09ac553c18dbc21cccb9a95e3db1c93da239845a5e9036.json similarity index 100% rename from prover/prover_dal/.sqlx/query-0b70c98c2edd8370ad09ac553c18dbc21cccb9a95e3db1c93da239845a5e9036.json rename to prover/crates/lib/prover_dal/.sqlx/query-0b70c98c2edd8370ad09ac553c18dbc21cccb9a95e3db1c93da239845a5e9036.json diff --git a/prover/prover_dal/.sqlx/query-16548daf69e9ff0528904be2e142254a457665179d9cf0a3c0b18c3fe09e4838.json b/prover/crates/lib/prover_dal/.sqlx/query-16548daf69e9ff0528904be2e142254a457665179d9cf0a3c0b18c3fe09e4838.json similarity index 100% rename from prover/prover_dal/.sqlx/query-16548daf69e9ff0528904be2e142254a457665179d9cf0a3c0b18c3fe09e4838.json rename to prover/crates/lib/prover_dal/.sqlx/query-16548daf69e9ff0528904be2e142254a457665179d9cf0a3c0b18c3fe09e4838.json diff --git a/prover/prover_dal/.sqlx/query-1849cfa3167eed2809e7724a63198f5e2450cc4faee2f80b37fbd5626324dbeb.json b/prover/crates/lib/prover_dal/.sqlx/query-1849cfa3167eed2809e7724a63198f5e2450cc4faee2f80b37fbd5626324dbeb.json similarity index 100% rename from prover/prover_dal/.sqlx/query-1849cfa3167eed2809e7724a63198f5e2450cc4faee2f80b37fbd5626324dbeb.json rename to prover/crates/lib/prover_dal/.sqlx/query-1849cfa3167eed2809e7724a63198f5e2450cc4faee2f80b37fbd5626324dbeb.json diff --git a/prover/prover_dal/.sqlx/query-2095e5646c382ccbc6e3bafdeddaae31358088e142dff51c9f0bde8f386900d3.json b/prover/crates/lib/prover_dal/.sqlx/query-2095e5646c382ccbc6e3bafdeddaae31358088e142dff51c9f0bde8f386900d3.json similarity index 100% rename from prover/prover_dal/.sqlx/query-2095e5646c382ccbc6e3bafdeddaae31358088e142dff51c9f0bde8f386900d3.json rename to prover/crates/lib/prover_dal/.sqlx/query-2095e5646c382ccbc6e3bafdeddaae31358088e142dff51c9f0bde8f386900d3.json diff --git a/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json 
b/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json similarity index 100% rename from prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json rename to prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json diff --git a/prover/prover_dal/.sqlx/query-28397b5a0b7af832d2a4d3d7011a68a48db6a64afcd41bbe0e17d98fa38fdb19.json b/prover/crates/lib/prover_dal/.sqlx/query-28397b5a0b7af832d2a4d3d7011a68a48db6a64afcd41bbe0e17d98fa38fdb19.json similarity index 100% rename from prover/prover_dal/.sqlx/query-28397b5a0b7af832d2a4d3d7011a68a48db6a64afcd41bbe0e17d98fa38fdb19.json rename to prover/crates/lib/prover_dal/.sqlx/query-28397b5a0b7af832d2a4d3d7011a68a48db6a64afcd41bbe0e17d98fa38fdb19.json diff --git a/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json b/prover/crates/lib/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json similarity index 100% rename from prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json rename to prover/crates/lib/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json diff --git a/prover/prover_dal/.sqlx/query-28f03acf565c4b50fe86f606c18a8b699386b3c5b4e02d5ce046f0f2e0ddc388.json b/prover/crates/lib/prover_dal/.sqlx/query-28f03acf565c4b50fe86f606c18a8b699386b3c5b4e02d5ce046f0f2e0ddc388.json similarity index 100% rename from prover/prover_dal/.sqlx/query-28f03acf565c4b50fe86f606c18a8b699386b3c5b4e02d5ce046f0f2e0ddc388.json rename to prover/crates/lib/prover_dal/.sqlx/query-28f03acf565c4b50fe86f606c18a8b699386b3c5b4e02d5ce046f0f2e0ddc388.json diff --git a/prover/prover_dal/.sqlx/query-29ff260b02f7b955f9fe0b657b87def3a97275b66ad33d214054dc9048ddf584.json 
b/prover/crates/lib/prover_dal/.sqlx/query-29ff260b02f7b955f9fe0b657b87def3a97275b66ad33d214054dc9048ddf584.json similarity index 100% rename from prover/prover_dal/.sqlx/query-29ff260b02f7b955f9fe0b657b87def3a97275b66ad33d214054dc9048ddf584.json rename to prover/crates/lib/prover_dal/.sqlx/query-29ff260b02f7b955f9fe0b657b87def3a97275b66ad33d214054dc9048ddf584.json diff --git a/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json b/prover/crates/lib/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json similarity index 100% rename from prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json rename to prover/crates/lib/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json diff --git a/prover/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json b/prover/crates/lib/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json similarity index 100% rename from prover/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json rename to prover/crates/lib/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json diff --git a/prover/prover_dal/.sqlx/query-2dc6b7bf08cced8791354fc47e319d03f894f40d2ec528382b5643c3d51ec8e7.json b/prover/crates/lib/prover_dal/.sqlx/query-2dc6b7bf08cced8791354fc47e319d03f894f40d2ec528382b5643c3d51ec8e7.json similarity index 100% rename from prover/prover_dal/.sqlx/query-2dc6b7bf08cced8791354fc47e319d03f894f40d2ec528382b5643c3d51ec8e7.json rename to prover/crates/lib/prover_dal/.sqlx/query-2dc6b7bf08cced8791354fc47e319d03f894f40d2ec528382b5643c3d51ec8e7.json diff --git a/prover/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json 
b/prover/crates/lib/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json similarity index 100% rename from prover/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json rename to prover/crates/lib/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json diff --git a/prover/prover_dal/.sqlx/query-2e6e2b68efc28114f44616b68fcf1d4d9a4b83a8b42846d8373ea13b96d612cf.json b/prover/crates/lib/prover_dal/.sqlx/query-2e6e2b68efc28114f44616b68fcf1d4d9a4b83a8b42846d8373ea13b96d612cf.json similarity index 100% rename from prover/prover_dal/.sqlx/query-2e6e2b68efc28114f44616b68fcf1d4d9a4b83a8b42846d8373ea13b96d612cf.json rename to prover/crates/lib/prover_dal/.sqlx/query-2e6e2b68efc28114f44616b68fcf1d4d9a4b83a8b42846d8373ea13b96d612cf.json diff --git a/prover/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json b/prover/crates/lib/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json similarity index 100% rename from prover/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json rename to prover/crates/lib/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json diff --git a/prover/prover_dal/.sqlx/query-3941da180ee62a7c5d4e392ff4fe2d3a6ebb3657862b91e3ece34119f098fc2d.json b/prover/crates/lib/prover_dal/.sqlx/query-3941da180ee62a7c5d4e392ff4fe2d3a6ebb3657862b91e3ece34119f098fc2d.json similarity index 100% rename from prover/prover_dal/.sqlx/query-3941da180ee62a7c5d4e392ff4fe2d3a6ebb3657862b91e3ece34119f098fc2d.json rename to prover/crates/lib/prover_dal/.sqlx/query-3941da180ee62a7c5d4e392ff4fe2d3a6ebb3657862b91e3ece34119f098fc2d.json diff --git a/prover/prover_dal/.sqlx/query-3c3abbf689fa64c6da7de69fd916769dbb04d3a61cf232892236c974660ffe64.json 
b/prover/crates/lib/prover_dal/.sqlx/query-3c3abbf689fa64c6da7de69fd916769dbb04d3a61cf232892236c974660ffe64.json similarity index 100% rename from prover/prover_dal/.sqlx/query-3c3abbf689fa64c6da7de69fd916769dbb04d3a61cf232892236c974660ffe64.json rename to prover/crates/lib/prover_dal/.sqlx/query-3c3abbf689fa64c6da7de69fd916769dbb04d3a61cf232892236c974660ffe64.json diff --git a/prover/prover_dal/.sqlx/query-3e0a1ebc684810c09ff83784bdd0ad195b0dd2a8ce56b1a9eb531103130b5e3e.json b/prover/crates/lib/prover_dal/.sqlx/query-3e0a1ebc684810c09ff83784bdd0ad195b0dd2a8ce56b1a9eb531103130b5e3e.json similarity index 100% rename from prover/prover_dal/.sqlx/query-3e0a1ebc684810c09ff83784bdd0ad195b0dd2a8ce56b1a9eb531103130b5e3e.json rename to prover/crates/lib/prover_dal/.sqlx/query-3e0a1ebc684810c09ff83784bdd0ad195b0dd2a8ce56b1a9eb531103130b5e3e.json diff --git a/prover/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json b/prover/crates/lib/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json similarity index 100% rename from prover/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json rename to prover/crates/lib/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json diff --git a/prover/prover_dal/.sqlx/query-412ef600a2f6025d8c22c2df8a497ed410fa47b268a66f1fc56d469c06ae50af.json b/prover/crates/lib/prover_dal/.sqlx/query-412ef600a2f6025d8c22c2df8a497ed410fa47b268a66f1fc56d469c06ae50af.json similarity index 100% rename from prover/prover_dal/.sqlx/query-412ef600a2f6025d8c22c2df8a497ed410fa47b268a66f1fc56d469c06ae50af.json rename to prover/crates/lib/prover_dal/.sqlx/query-412ef600a2f6025d8c22c2df8a497ed410fa47b268a66f1fc56d469c06ae50af.json diff --git a/prover/prover_dal/.sqlx/query-41af30620f8a1f20b8a6c46be162601d35fd2881ac1fd070f0f1a8add4bc388d.json 
b/prover/crates/lib/prover_dal/.sqlx/query-41af30620f8a1f20b8a6c46be162601d35fd2881ac1fd070f0f1a8add4bc388d.json similarity index 100% rename from prover/prover_dal/.sqlx/query-41af30620f8a1f20b8a6c46be162601d35fd2881ac1fd070f0f1a8add4bc388d.json rename to prover/crates/lib/prover_dal/.sqlx/query-41af30620f8a1f20b8a6c46be162601d35fd2881ac1fd070f0f1a8add4bc388d.json diff --git a/prover/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json b/prover/crates/lib/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json similarity index 100% rename from prover/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json rename to prover/crates/lib/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json diff --git a/prover/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json b/prover/crates/lib/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json similarity index 100% rename from prover/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json rename to prover/crates/lib/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json diff --git a/prover/prover_dal/.sqlx/query-53f78fdee39b113d2f55f6f951bd94f28b7b2b60d551d552a9b0bab1f1791e39.json b/prover/crates/lib/prover_dal/.sqlx/query-53f78fdee39b113d2f55f6f951bd94f28b7b2b60d551d552a9b0bab1f1791e39.json similarity index 100% rename from prover/prover_dal/.sqlx/query-53f78fdee39b113d2f55f6f951bd94f28b7b2b60d551d552a9b0bab1f1791e39.json rename to prover/crates/lib/prover_dal/.sqlx/query-53f78fdee39b113d2f55f6f951bd94f28b7b2b60d551d552a9b0bab1f1791e39.json diff --git a/prover/prover_dal/.sqlx/query-542af2ff4259182310363ac0213592895215e22fd4cf0dfe69b83277f8d05db3.json 
b/prover/crates/lib/prover_dal/.sqlx/query-542af2ff4259182310363ac0213592895215e22fd4cf0dfe69b83277f8d05db3.json similarity index 100% rename from prover/prover_dal/.sqlx/query-542af2ff4259182310363ac0213592895215e22fd4cf0dfe69b83277f8d05db3.json rename to prover/crates/lib/prover_dal/.sqlx/query-542af2ff4259182310363ac0213592895215e22fd4cf0dfe69b83277f8d05db3.json diff --git a/prover/prover_dal/.sqlx/query-5db868e03dc6901a0afa06f82a37a1a04821495487a80595cc9b523dac6ac8e9.json b/prover/crates/lib/prover_dal/.sqlx/query-5db868e03dc6901a0afa06f82a37a1a04821495487a80595cc9b523dac6ac8e9.json similarity index 100% rename from prover/prover_dal/.sqlx/query-5db868e03dc6901a0afa06f82a37a1a04821495487a80595cc9b523dac6ac8e9.json rename to prover/crates/lib/prover_dal/.sqlx/query-5db868e03dc6901a0afa06f82a37a1a04821495487a80595cc9b523dac6ac8e9.json diff --git a/prover/prover_dal/.sqlx/query-5e781f84ec41edd0941fa84de837effac442434c6e734d977e6682a7484abe7f.json b/prover/crates/lib/prover_dal/.sqlx/query-5e781f84ec41edd0941fa84de837effac442434c6e734d977e6682a7484abe7f.json similarity index 100% rename from prover/prover_dal/.sqlx/query-5e781f84ec41edd0941fa84de837effac442434c6e734d977e6682a7484abe7f.json rename to prover/crates/lib/prover_dal/.sqlx/query-5e781f84ec41edd0941fa84de837effac442434c6e734d977e6682a7484abe7f.json diff --git a/prover/prover_dal/.sqlx/query-61b2b858d4636809c21838635aa52aeb5f06c26f68d131dd242f6ed68816c513.json b/prover/crates/lib/prover_dal/.sqlx/query-61b2b858d4636809c21838635aa52aeb5f06c26f68d131dd242f6ed68816c513.json similarity index 100% rename from prover/prover_dal/.sqlx/query-61b2b858d4636809c21838635aa52aeb5f06c26f68d131dd242f6ed68816c513.json rename to prover/crates/lib/prover_dal/.sqlx/query-61b2b858d4636809c21838635aa52aeb5f06c26f68d131dd242f6ed68816c513.json diff --git a/prover/prover_dal/.sqlx/query-67f5f3a015dc478f02f4f701c90d0fc9ac9a7f3dce2ba48c2d0e6f38b6ba455a.json 
b/prover/crates/lib/prover_dal/.sqlx/query-67f5f3a015dc478f02f4f701c90d0fc9ac9a7f3dce2ba48c2d0e6f38b6ba455a.json similarity index 100% rename from prover/prover_dal/.sqlx/query-67f5f3a015dc478f02f4f701c90d0fc9ac9a7f3dce2ba48c2d0e6f38b6ba455a.json rename to prover/crates/lib/prover_dal/.sqlx/query-67f5f3a015dc478f02f4f701c90d0fc9ac9a7f3dce2ba48c2d0e6f38b6ba455a.json diff --git a/prover/prover_dal/.sqlx/query-6cfc59d2fc039c706f30ae91b7d9d0c658093dede5eb61489205aa751ad5b8ec.json b/prover/crates/lib/prover_dal/.sqlx/query-6cfc59d2fc039c706f30ae91b7d9d0c658093dede5eb61489205aa751ad5b8ec.json similarity index 100% rename from prover/prover_dal/.sqlx/query-6cfc59d2fc039c706f30ae91b7d9d0c658093dede5eb61489205aa751ad5b8ec.json rename to prover/crates/lib/prover_dal/.sqlx/query-6cfc59d2fc039c706f30ae91b7d9d0c658093dede5eb61489205aa751ad5b8ec.json diff --git a/prover/prover_dal/.sqlx/query-6f20d468efe916f8e92cbf259b37ac83cd32a628d3e01e5cd1949c519683a352.json b/prover/crates/lib/prover_dal/.sqlx/query-6f20d468efe916f8e92cbf259b37ac83cd32a628d3e01e5cd1949c519683a352.json similarity index 100% rename from prover/prover_dal/.sqlx/query-6f20d468efe916f8e92cbf259b37ac83cd32a628d3e01e5cd1949c519683a352.json rename to prover/crates/lib/prover_dal/.sqlx/query-6f20d468efe916f8e92cbf259b37ac83cd32a628d3e01e5cd1949c519683a352.json diff --git a/prover/prover_dal/.sqlx/query-764693ceeb45f8478a20242b592d419667f11d80036cda021ecbf23b0b5f7f42.json b/prover/crates/lib/prover_dal/.sqlx/query-764693ceeb45f8478a20242b592d419667f11d80036cda021ecbf23b0b5f7f42.json similarity index 100% rename from prover/prover_dal/.sqlx/query-764693ceeb45f8478a20242b592d419667f11d80036cda021ecbf23b0b5f7f42.json rename to prover/crates/lib/prover_dal/.sqlx/query-764693ceeb45f8478a20242b592d419667f11d80036cda021ecbf23b0b5f7f42.json diff --git a/prover/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json 
b/prover/crates/lib/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json similarity index 100% rename from prover/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json rename to prover/crates/lib/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json diff --git a/prover/prover_dal/.sqlx/query-7effbacbdcc4bd762386351755f4f32042dfead8a37401558f5fd3b03480f2dd.json b/prover/crates/lib/prover_dal/.sqlx/query-7effbacbdcc4bd762386351755f4f32042dfead8a37401558f5fd3b03480f2dd.json similarity index 100% rename from prover/prover_dal/.sqlx/query-7effbacbdcc4bd762386351755f4f32042dfead8a37401558f5fd3b03480f2dd.json rename to prover/crates/lib/prover_dal/.sqlx/query-7effbacbdcc4bd762386351755f4f32042dfead8a37401558f5fd3b03480f2dd.json diff --git a/prover/prover_dal/.sqlx/query-806b82a9effd885ba537a2a1c7d7227120a8279db1875d26ccae5ee0785f46a9.json b/prover/crates/lib/prover_dal/.sqlx/query-806b82a9effd885ba537a2a1c7d7227120a8279db1875d26ccae5ee0785f46a9.json similarity index 100% rename from prover/prover_dal/.sqlx/query-806b82a9effd885ba537a2a1c7d7227120a8279db1875d26ccae5ee0785f46a9.json rename to prover/crates/lib/prover_dal/.sqlx/query-806b82a9effd885ba537a2a1c7d7227120a8279db1875d26ccae5ee0785f46a9.json diff --git a/prover/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json b/prover/crates/lib/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json similarity index 100% rename from prover/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json rename to prover/crates/lib/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json diff --git a/prover/prover_dal/.sqlx/query-860846c9bcad1edd1a2906542c178815e29440592b2bb00adacf02730b526458.json 
b/prover/crates/lib/prover_dal/.sqlx/query-860846c9bcad1edd1a2906542c178815e29440592b2bb00adacf02730b526458.json similarity index 100% rename from prover/prover_dal/.sqlx/query-860846c9bcad1edd1a2906542c178815e29440592b2bb00adacf02730b526458.json rename to prover/crates/lib/prover_dal/.sqlx/query-860846c9bcad1edd1a2906542c178815e29440592b2bb00adacf02730b526458.json diff --git a/prover/prover_dal/.sqlx/query-8719c090a9ad2488d556e495238cdce6412e2725cf5162ce7a733f6dceaecb11.json b/prover/crates/lib/prover_dal/.sqlx/query-8719c090a9ad2488d556e495238cdce6412e2725cf5162ce7a733f6dceaecb11.json similarity index 100% rename from prover/prover_dal/.sqlx/query-8719c090a9ad2488d556e495238cdce6412e2725cf5162ce7a733f6dceaecb11.json rename to prover/crates/lib/prover_dal/.sqlx/query-8719c090a9ad2488d556e495238cdce6412e2725cf5162ce7a733f6dceaecb11.json diff --git a/prover/prover_dal/.sqlx/query-8720d411e0c9640afd61e927a89c0b6c018e6a4d279acd24a4ea7d81b5cc5123.json b/prover/crates/lib/prover_dal/.sqlx/query-8720d411e0c9640afd61e927a89c0b6c018e6a4d279acd24a4ea7d81b5cc5123.json similarity index 100% rename from prover/prover_dal/.sqlx/query-8720d411e0c9640afd61e927a89c0b6c018e6a4d279acd24a4ea7d81b5cc5123.json rename to prover/crates/lib/prover_dal/.sqlx/query-8720d411e0c9640afd61e927a89c0b6c018e6a4d279acd24a4ea7d81b5cc5123.json diff --git a/prover/prover_dal/.sqlx/query-87a73aa95a85efeb065428f9e56e085ea80cf93c2fd66fd3949aab428bbdc560.json b/prover/crates/lib/prover_dal/.sqlx/query-87a73aa95a85efeb065428f9e56e085ea80cf93c2fd66fd3949aab428bbdc560.json similarity index 100% rename from prover/prover_dal/.sqlx/query-87a73aa95a85efeb065428f9e56e085ea80cf93c2fd66fd3949aab428bbdc560.json rename to prover/crates/lib/prover_dal/.sqlx/query-87a73aa95a85efeb065428f9e56e085ea80cf93c2fd66fd3949aab428bbdc560.json diff --git a/prover/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json 
b/prover/crates/lib/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json similarity index 100% rename from prover/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json rename to prover/crates/lib/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json diff --git a/prover/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json b/prover/crates/lib/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json similarity index 100% rename from prover/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json rename to prover/crates/lib/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json diff --git a/prover/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json b/prover/crates/lib/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json similarity index 100% rename from prover/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json rename to prover/crates/lib/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json diff --git a/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json b/prover/crates/lib/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json similarity index 100% rename from prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json rename to prover/crates/lib/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json diff --git a/prover/prover_dal/.sqlx/query-9b713312b539b4eefa58346f0070767a2cd4488e670674cd9149f7a332c0198d.json 
b/prover/crates/lib/prover_dal/.sqlx/query-9b713312b539b4eefa58346f0070767a2cd4488e670674cd9149f7a332c0198d.json similarity index 100% rename from prover/prover_dal/.sqlx/query-9b713312b539b4eefa58346f0070767a2cd4488e670674cd9149f7a332c0198d.json rename to prover/crates/lib/prover_dal/.sqlx/query-9b713312b539b4eefa58346f0070767a2cd4488e670674cd9149f7a332c0198d.json diff --git a/prover/prover_dal/.sqlx/query-a0f60a97f09b2467ca73bb6fbebb210d65149cdd4a3411a79b717aadbffb43af.json b/prover/crates/lib/prover_dal/.sqlx/query-a0f60a97f09b2467ca73bb6fbebb210d65149cdd4a3411a79b717aadbffb43af.json similarity index 100% rename from prover/prover_dal/.sqlx/query-a0f60a97f09b2467ca73bb6fbebb210d65149cdd4a3411a79b717aadbffb43af.json rename to prover/crates/lib/prover_dal/.sqlx/query-a0f60a97f09b2467ca73bb6fbebb210d65149cdd4a3411a79b717aadbffb43af.json diff --git a/prover/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json b/prover/crates/lib/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json similarity index 100% rename from prover/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json rename to prover/crates/lib/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json diff --git a/prover/prover_dal/.sqlx/query-a94fffdbc1827dc5df908ea1e99ef3ad13840d2c497760e9bd0513f68dc4271c.json b/prover/crates/lib/prover_dal/.sqlx/query-a94fffdbc1827dc5df908ea1e99ef3ad13840d2c497760e9bd0513f68dc4271c.json similarity index 100% rename from prover/prover_dal/.sqlx/query-a94fffdbc1827dc5df908ea1e99ef3ad13840d2c497760e9bd0513f68dc4271c.json rename to prover/crates/lib/prover_dal/.sqlx/query-a94fffdbc1827dc5df908ea1e99ef3ad13840d2c497760e9bd0513f68dc4271c.json diff --git a/prover/prover_dal/.sqlx/query-abc93d27a8673b23e18d050e84c43c868c63c853edb5c4f41e48a3cc6378eca9.json 
b/prover/crates/lib/prover_dal/.sqlx/query-abc93d27a8673b23e18d050e84c43c868c63c853edb5c4f41e48a3cc6378eca9.json similarity index 100% rename from prover/prover_dal/.sqlx/query-abc93d27a8673b23e18d050e84c43c868c63c853edb5c4f41e48a3cc6378eca9.json rename to prover/crates/lib/prover_dal/.sqlx/query-abc93d27a8673b23e18d050e84c43c868c63c853edb5c4f41e48a3cc6378eca9.json diff --git a/prover/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json b/prover/crates/lib/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json similarity index 100% rename from prover/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json rename to prover/crates/lib/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json diff --git a/prover/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json b/prover/crates/lib/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json similarity index 100% rename from prover/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json rename to prover/crates/lib/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json diff --git a/prover/prover_dal/.sqlx/query-b321c5ba22358cbb1fd9c627f1e7b56187686173327498ac75424593547c19c5.json b/prover/crates/lib/prover_dal/.sqlx/query-b321c5ba22358cbb1fd9c627f1e7b56187686173327498ac75424593547c19c5.json similarity index 100% rename from prover/prover_dal/.sqlx/query-b321c5ba22358cbb1fd9c627f1e7b56187686173327498ac75424593547c19c5.json rename to prover/crates/lib/prover_dal/.sqlx/query-b321c5ba22358cbb1fd9c627f1e7b56187686173327498ac75424593547c19c5.json diff --git a/prover/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json 
b/prover/crates/lib/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json similarity index 100% rename from prover/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json rename to prover/crates/lib/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json diff --git a/prover/prover_dal/.sqlx/query-b3d71dbe14bcd94131b29b64dcb49b6370c211a7fc24ad03a5f0e327f9d18040.json b/prover/crates/lib/prover_dal/.sqlx/query-b3d71dbe14bcd94131b29b64dcb49b6370c211a7fc24ad03a5f0e327f9d18040.json similarity index 100% rename from prover/prover_dal/.sqlx/query-b3d71dbe14bcd94131b29b64dcb49b6370c211a7fc24ad03a5f0e327f9d18040.json rename to prover/crates/lib/prover_dal/.sqlx/query-b3d71dbe14bcd94131b29b64dcb49b6370c211a7fc24ad03a5f0e327f9d18040.json diff --git a/prover/prover_dal/.sqlx/query-b4794e6a0c2366d5d95ab373c310103263af3ff5cb6c9dc5df59d3cd2a5e56b4.json b/prover/crates/lib/prover_dal/.sqlx/query-b4794e6a0c2366d5d95ab373c310103263af3ff5cb6c9dc5df59d3cd2a5e56b4.json similarity index 100% rename from prover/prover_dal/.sqlx/query-b4794e6a0c2366d5d95ab373c310103263af3ff5cb6c9dc5df59d3cd2a5e56b4.json rename to prover/crates/lib/prover_dal/.sqlx/query-b4794e6a0c2366d5d95ab373c310103263af3ff5cb6c9dc5df59d3cd2a5e56b4.json diff --git a/prover/prover_dal/.sqlx/query-b568f9cb9c2bd53b5dcde15f368a9dc31c7d51476f18cffa80cad653298ad252.json b/prover/crates/lib/prover_dal/.sqlx/query-b568f9cb9c2bd53b5dcde15f368a9dc31c7d51476f18cffa80cad653298ad252.json similarity index 100% rename from prover/prover_dal/.sqlx/query-b568f9cb9c2bd53b5dcde15f368a9dc31c7d51476f18cffa80cad653298ad252.json rename to prover/crates/lib/prover_dal/.sqlx/query-b568f9cb9c2bd53b5dcde15f368a9dc31c7d51476f18cffa80cad653298ad252.json diff --git a/prover/prover_dal/.sqlx/query-bf06bd08d8ccf67fc00bbc364715263556f258565f79cbb40f5ecc1a4f6402f5.json 
b/prover/crates/lib/prover_dal/.sqlx/query-bf06bd08d8ccf67fc00bbc364715263556f258565f79cbb40f5ecc1a4f6402f5.json similarity index 100% rename from prover/prover_dal/.sqlx/query-bf06bd08d8ccf67fc00bbc364715263556f258565f79cbb40f5ecc1a4f6402f5.json rename to prover/crates/lib/prover_dal/.sqlx/query-bf06bd08d8ccf67fc00bbc364715263556f258565f79cbb40f5ecc1a4f6402f5.json diff --git a/prover/prover_dal/.sqlx/query-bfb80956a18eabf266f5b5a9d62912d57f8eb2a38bdb7884fc812a2897a3a660.json b/prover/crates/lib/prover_dal/.sqlx/query-bfb80956a18eabf266f5b5a9d62912d57f8eb2a38bdb7884fc812a2897a3a660.json similarity index 100% rename from prover/prover_dal/.sqlx/query-bfb80956a18eabf266f5b5a9d62912d57f8eb2a38bdb7884fc812a2897a3a660.json rename to prover/crates/lib/prover_dal/.sqlx/query-bfb80956a18eabf266f5b5a9d62912d57f8eb2a38bdb7884fc812a2897a3a660.json diff --git a/prover/prover_dal/.sqlx/query-c156004a0e5ad5bcc33d3b894fd69718349ac4fc08b455c7f4265d7443f2ec13.json b/prover/crates/lib/prover_dal/.sqlx/query-c156004a0e5ad5bcc33d3b894fd69718349ac4fc08b455c7f4265d7443f2ec13.json similarity index 100% rename from prover/prover_dal/.sqlx/query-c156004a0e5ad5bcc33d3b894fd69718349ac4fc08b455c7f4265d7443f2ec13.json rename to prover/crates/lib/prover_dal/.sqlx/query-c156004a0e5ad5bcc33d3b894fd69718349ac4fc08b455c7f4265d7443f2ec13.json diff --git a/prover/prover_dal/.sqlx/query-c173743af526d8150b6091ea52e6997fcfbc7ad688f2eee3dfab1029344d2382.json b/prover/crates/lib/prover_dal/.sqlx/query-c173743af526d8150b6091ea52e6997fcfbc7ad688f2eee3dfab1029344d2382.json similarity index 100% rename from prover/prover_dal/.sqlx/query-c173743af526d8150b6091ea52e6997fcfbc7ad688f2eee3dfab1029344d2382.json rename to prover/crates/lib/prover_dal/.sqlx/query-c173743af526d8150b6091ea52e6997fcfbc7ad688f2eee3dfab1029344d2382.json diff --git a/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json 
b/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json similarity index 100% rename from prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json rename to prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json diff --git a/prover/prover_dal/.sqlx/query-c340c043c938bf5f4b63d57a1654775c6f7414c7bed75d33b61de00fdbabc349.json b/prover/crates/lib/prover_dal/.sqlx/query-c340c043c938bf5f4b63d57a1654775c6f7414c7bed75d33b61de00fdbabc349.json similarity index 100% rename from prover/prover_dal/.sqlx/query-c340c043c938bf5f4b63d57a1654775c6f7414c7bed75d33b61de00fdbabc349.json rename to prover/crates/lib/prover_dal/.sqlx/query-c340c043c938bf5f4b63d57a1654775c6f7414c7bed75d33b61de00fdbabc349.json diff --git a/prover/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json b/prover/crates/lib/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json similarity index 100% rename from prover/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json rename to prover/crates/lib/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json diff --git a/prover/prover_dal/.sqlx/query-ca9d06141265b8524ee28c55569cb21a635037d89ce24dd3ad58ffaadb59594a.json b/prover/crates/lib/prover_dal/.sqlx/query-ca9d06141265b8524ee28c55569cb21a635037d89ce24dd3ad58ffaadb59594a.json similarity index 100% rename from prover/prover_dal/.sqlx/query-ca9d06141265b8524ee28c55569cb21a635037d89ce24dd3ad58ffaadb59594a.json rename to prover/crates/lib/prover_dal/.sqlx/query-ca9d06141265b8524ee28c55569cb21a635037d89ce24dd3ad58ffaadb59594a.json diff --git a/prover/prover_dal/.sqlx/query-caff620ae66d7fbe3caff7505173b6da86d1e693be03936730c340121167341f.json 
b/prover/crates/lib/prover_dal/.sqlx/query-caff620ae66d7fbe3caff7505173b6da86d1e693be03936730c340121167341f.json similarity index 100% rename from prover/prover_dal/.sqlx/query-caff620ae66d7fbe3caff7505173b6da86d1e693be03936730c340121167341f.json rename to prover/crates/lib/prover_dal/.sqlx/query-caff620ae66d7fbe3caff7505173b6da86d1e693be03936730c340121167341f.json diff --git a/prover/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json b/prover/crates/lib/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json similarity index 100% rename from prover/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json rename to prover/crates/lib/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json diff --git a/prover/prover_dal/.sqlx/query-d16278c6025eb3a205266fb5273f029e262be45614404159908af1624349700b.json b/prover/crates/lib/prover_dal/.sqlx/query-d16278c6025eb3a205266fb5273f029e262be45614404159908af1624349700b.json similarity index 100% rename from prover/prover_dal/.sqlx/query-d16278c6025eb3a205266fb5273f029e262be45614404159908af1624349700b.json rename to prover/crates/lib/prover_dal/.sqlx/query-d16278c6025eb3a205266fb5273f029e262be45614404159908af1624349700b.json diff --git a/prover/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json b/prover/crates/lib/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json similarity index 100% rename from prover/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json rename to prover/crates/lib/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json diff --git a/prover/prover_dal/.sqlx/query-d4949debfe0dc5112204cd196c68b02c44b099e27e3c45c5c810cd5fcd8884ed.json 
b/prover/crates/lib/prover_dal/.sqlx/query-d4949debfe0dc5112204cd196c68b02c44b099e27e3c45c5c810cd5fcd8884ed.json similarity index 100% rename from prover/prover_dal/.sqlx/query-d4949debfe0dc5112204cd196c68b02c44b099e27e3c45c5c810cd5fcd8884ed.json rename to prover/crates/lib/prover_dal/.sqlx/query-d4949debfe0dc5112204cd196c68b02c44b099e27e3c45c5c810cd5fcd8884ed.json diff --git a/prover/prover_dal/.sqlx/query-d91c931e2a14cf1183a608d041fc6fadb8e12a9218399d189b4d95e2ca4fcc48.json b/prover/crates/lib/prover_dal/.sqlx/query-d91c931e2a14cf1183a608d041fc6fadb8e12a9218399d189b4d95e2ca4fcc48.json similarity index 100% rename from prover/prover_dal/.sqlx/query-d91c931e2a14cf1183a608d041fc6fadb8e12a9218399d189b4d95e2ca4fcc48.json rename to prover/crates/lib/prover_dal/.sqlx/query-d91c931e2a14cf1183a608d041fc6fadb8e12a9218399d189b4d95e2ca4fcc48.json diff --git a/prover/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json b/prover/crates/lib/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json similarity index 100% rename from prover/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json rename to prover/crates/lib/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json diff --git a/prover/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json b/prover/crates/lib/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json similarity index 100% rename from prover/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json rename to prover/crates/lib/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json diff --git a/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json 
b/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json rename to prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json diff --git a/prover/prover_dal/.sqlx/query-e3194873d24e67f8d0e98bf8bf2d4f9a3b98458746972c9860fb9473947d59ff.json b/prover/crates/lib/prover_dal/.sqlx/query-e3194873d24e67f8d0e98bf8bf2d4f9a3b98458746972c9860fb9473947d59ff.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e3194873d24e67f8d0e98bf8bf2d4f9a3b98458746972c9860fb9473947d59ff.json rename to prover/crates/lib/prover_dal/.sqlx/query-e3194873d24e67f8d0e98bf8bf2d4f9a3b98458746972c9860fb9473947d59ff.json diff --git a/prover/prover_dal/.sqlx/query-e32c0d85cb2841efb0b7cea6b049bae42849574731d33539bfdcca21c9b64f4e.json b/prover/crates/lib/prover_dal/.sqlx/query-e32c0d85cb2841efb0b7cea6b049bae42849574731d33539bfdcca21c9b64f4e.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e32c0d85cb2841efb0b7cea6b049bae42849574731d33539bfdcca21c9b64f4e.json rename to prover/crates/lib/prover_dal/.sqlx/query-e32c0d85cb2841efb0b7cea6b049bae42849574731d33539bfdcca21c9b64f4e.json diff --git a/prover/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json b/prover/crates/lib/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json rename to prover/crates/lib/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json diff --git a/prover/prover_dal/.sqlx/query-e743af4c18ec91eb46db5a19556fba74ec2cfc3c89c7e4e2ea475c3ce4092849.json 
b/prover/crates/lib/prover_dal/.sqlx/query-e743af4c18ec91eb46db5a19556fba74ec2cfc3c89c7e4e2ea475c3ce4092849.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e743af4c18ec91eb46db5a19556fba74ec2cfc3c89c7e4e2ea475c3ce4092849.json rename to prover/crates/lib/prover_dal/.sqlx/query-e743af4c18ec91eb46db5a19556fba74ec2cfc3c89c7e4e2ea475c3ce4092849.json diff --git a/prover/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json b/prover/crates/lib/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json rename to prover/crates/lib/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json diff --git a/prover/prover_dal/.sqlx/query-e9c9b69852fa68f463f17b6d63ab99cf505662036f2dd7a9f1807c4c1bad7c7b.json b/prover/crates/lib/prover_dal/.sqlx/query-e9c9b69852fa68f463f17b6d63ab99cf505662036f2dd7a9f1807c4c1bad7c7b.json similarity index 100% rename from prover/prover_dal/.sqlx/query-e9c9b69852fa68f463f17b6d63ab99cf505662036f2dd7a9f1807c4c1bad7c7b.json rename to prover/crates/lib/prover_dal/.sqlx/query-e9c9b69852fa68f463f17b6d63ab99cf505662036f2dd7a9f1807c4c1bad7c7b.json diff --git a/prover/prover_dal/.sqlx/query-ec04b89218111a5dc8d5ade506ac3465e2211ef3013386feb12d4cc04e0eade9.json b/prover/crates/lib/prover_dal/.sqlx/query-ec04b89218111a5dc8d5ade506ac3465e2211ef3013386feb12d4cc04e0eade9.json similarity index 100% rename from prover/prover_dal/.sqlx/query-ec04b89218111a5dc8d5ade506ac3465e2211ef3013386feb12d4cc04e0eade9.json rename to prover/crates/lib/prover_dal/.sqlx/query-ec04b89218111a5dc8d5ade506ac3465e2211ef3013386feb12d4cc04e0eade9.json diff --git a/prover/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json 
b/prover/crates/lib/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json similarity index 100% rename from prover/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json rename to prover/crates/lib/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json diff --git a/prover/prover_dal/.sqlx/query-eef1b56e87eff63fcf6ffb98791583a7526ae38ceb4bf80543cfd3fb60492fb9.json b/prover/crates/lib/prover_dal/.sqlx/query-eef1b56e87eff63fcf6ffb98791583a7526ae38ceb4bf80543cfd3fb60492fb9.json similarity index 100% rename from prover/prover_dal/.sqlx/query-eef1b56e87eff63fcf6ffb98791583a7526ae38ceb4bf80543cfd3fb60492fb9.json rename to prover/crates/lib/prover_dal/.sqlx/query-eef1b56e87eff63fcf6ffb98791583a7526ae38ceb4bf80543cfd3fb60492fb9.json diff --git a/prover/prover_dal/.sqlx/query-f99c34313e2717ec74b4f274e33dae905acac53b46eeaeb059d23e48a71df3b4.json b/prover/crates/lib/prover_dal/.sqlx/query-f99c34313e2717ec74b4f274e33dae905acac53b46eeaeb059d23e48a71df3b4.json similarity index 100% rename from prover/prover_dal/.sqlx/query-f99c34313e2717ec74b4f274e33dae905acac53b46eeaeb059d23e48a71df3b4.json rename to prover/crates/lib/prover_dal/.sqlx/query-f99c34313e2717ec74b4f274e33dae905acac53b46eeaeb059d23e48a71df3b4.json diff --git a/prover/prover_dal/.sqlx/query-fcddeb96dcd1611dedb2091c1be304e8a35fd65bf37e976b7106f57c57e70b9b.json b/prover/crates/lib/prover_dal/.sqlx/query-fcddeb96dcd1611dedb2091c1be304e8a35fd65bf37e976b7106f57c57e70b9b.json similarity index 100% rename from prover/prover_dal/.sqlx/query-fcddeb96dcd1611dedb2091c1be304e8a35fd65bf37e976b7106f57c57e70b9b.json rename to prover/crates/lib/prover_dal/.sqlx/query-fcddeb96dcd1611dedb2091c1be304e8a35fd65bf37e976b7106f57c57e70b9b.json diff --git a/prover/prover_dal/Cargo.toml b/prover/crates/lib/prover_dal/Cargo.toml similarity index 100% rename from prover/prover_dal/Cargo.toml rename to 
prover/crates/lib/prover_dal/Cargo.toml diff --git a/prover/prover_dal/doc/FriProofCompressorDal.md b/prover/crates/lib/prover_dal/doc/FriProofCompressorDal.md similarity index 100% rename from prover/prover_dal/doc/FriProofCompressorDal.md rename to prover/crates/lib/prover_dal/doc/FriProofCompressorDal.md diff --git a/prover/prover_dal/doc/FriProverDal.md b/prover/crates/lib/prover_dal/doc/FriProverDal.md similarity index 100% rename from prover/prover_dal/doc/FriProverDal.md rename to prover/crates/lib/prover_dal/doc/FriProverDal.md diff --git a/prover/prover_dal/doc/FriWitnessGeneratorDal.md b/prover/crates/lib/prover_dal/doc/FriWitnessGeneratorDal.md similarity index 100% rename from prover/prover_dal/doc/FriWitnessGeneratorDal.md rename to prover/crates/lib/prover_dal/doc/FriWitnessGeneratorDal.md diff --git a/prover/prover_dal/migrations/20240131134938_initial-prover-migration.down.sql b/prover/crates/lib/prover_dal/migrations/20240131134938_initial-prover-migration.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240131134938_initial-prover-migration.down.sql rename to prover/crates/lib/prover_dal/migrations/20240131134938_initial-prover-migration.down.sql diff --git a/prover/prover_dal/migrations/20240131134938_initial-prover-migration.up.sql b/prover/crates/lib/prover_dal/migrations/20240131134938_initial-prover-migration.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240131134938_initial-prover-migration.up.sql rename to prover/crates/lib/prover_dal/migrations/20240131134938_initial-prover-migration.up.sql diff --git a/prover/prover_dal/migrations/20240226120310_add_support_for_eip4844.down.sql b/prover/crates/lib/prover_dal/migrations/20240226120310_add_support_for_eip4844.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240226120310_add_support_for_eip4844.down.sql rename to prover/crates/lib/prover_dal/migrations/20240226120310_add_support_for_eip4844.down.sql diff --git 
a/prover/prover_dal/migrations/20240226120310_add_support_for_eip4844.up.sql b/prover/crates/lib/prover_dal/migrations/20240226120310_add_support_for_eip4844.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240226120310_add_support_for_eip4844.up.sql rename to prover/crates/lib/prover_dal/migrations/20240226120310_add_support_for_eip4844.up.sql diff --git a/prover/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.down.sql b/prover/crates/lib/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.down.sql rename to prover/crates/lib/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.down.sql diff --git a/prover/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.up.sql b/prover/crates/lib/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.up.sql rename to prover/crates/lib/prover_dal/migrations/20240403070124_add_archived_at_column_to_prover_queue_archive.up.sql diff --git a/prover/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.down.sql b/prover/crates/lib/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.down.sql rename to prover/crates/lib/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.down.sql diff --git a/prover/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.up.sql b/prover/crates/lib/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.up.sql similarity index 100% rename from 
prover/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.up.sql rename to prover/crates/lib/prover_dal/migrations/20240410141719_add-protocol-versions-to-tables.up.sql diff --git a/prover/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.down.sql b/prover/crates/lib/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.down.sql rename to prover/crates/lib/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.down.sql diff --git a/prover/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.up.sql b/prover/crates/lib/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.up.sql rename to prover/crates/lib/prover_dal/migrations/20240419102606_add_changes_for_recursion_tip.up.sql diff --git a/prover/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.down.sql b/prover/crates/lib/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.down.sql rename to prover/crates/lib/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.down.sql diff --git a/prover/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.up.sql b/prover/crates/lib/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.up.sql rename to prover/crates/lib/prover_dal/migrations/20240524123522_add-patch-columns-for-semver.up.sql diff --git a/prover/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.down.sql 
b/prover/crates/lib/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.down.sql rename to prover/crates/lib/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.down.sql diff --git a/prover/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.up.sql b/prover/crates/lib/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.up.sql rename to prover/crates/lib/prover_dal/migrations/20240613111518_make_number_of_final_node_jobs_mandatory.up.sql diff --git a/prover/prover_dal/migrations/20240703113903_add-vm_run_data-column.down.sql b/prover/crates/lib/prover_dal/migrations/20240703113903_add-vm_run_data-column.down.sql similarity index 100% rename from prover/prover_dal/migrations/20240703113903_add-vm_run_data-column.down.sql rename to prover/crates/lib/prover_dal/migrations/20240703113903_add-vm_run_data-column.down.sql diff --git a/prover/prover_dal/migrations/20240703113903_add-vm_run_data-column.up.sql b/prover/crates/lib/prover_dal/migrations/20240703113903_add-vm_run_data-column.up.sql similarity index 100% rename from prover/prover_dal/migrations/20240703113903_add-vm_run_data-column.up.sql rename to prover/crates/lib/prover_dal/migrations/20240703113903_add-vm_run_data-column.up.sql diff --git a/prover/prover_dal/src/fri_gpu_prover_queue_dal.rs b/prover/crates/lib/prover_dal/src/fri_gpu_prover_queue_dal.rs similarity index 100% rename from prover/prover_dal/src/fri_gpu_prover_queue_dal.rs rename to prover/crates/lib/prover_dal/src/fri_gpu_prover_queue_dal.rs diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs similarity index 100% rename 
from prover/prover_dal/src/fri_proof_compressor_dal.rs rename to prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs diff --git a/prover/prover_dal/src/fri_protocol_versions_dal.rs b/prover/crates/lib/prover_dal/src/fri_protocol_versions_dal.rs similarity index 100% rename from prover/prover_dal/src/fri_protocol_versions_dal.rs rename to prover/crates/lib/prover_dal/src/fri_protocol_versions_dal.rs diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs similarity index 100% rename from prover/prover_dal/src/fri_prover_dal.rs rename to prover/crates/lib/prover_dal/src/fri_prover_dal.rs diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs similarity index 100% rename from prover/prover_dal/src/fri_witness_generator_dal.rs rename to prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs diff --git a/prover/prover_dal/src/lib.rs b/prover/crates/lib/prover_dal/src/lib.rs similarity index 100% rename from prover/prover_dal/src/lib.rs rename to prover/crates/lib/prover_dal/src/lib.rs diff --git a/prover/prover_fri_types/Cargo.toml b/prover/crates/lib/prover_fri_types/Cargo.toml similarity index 100% rename from prover/prover_fri_types/Cargo.toml rename to prover/crates/lib/prover_fri_types/Cargo.toml diff --git a/prover/prover_fri_types/README.md b/prover/crates/lib/prover_fri_types/README.md similarity index 100% rename from prover/prover_fri_types/README.md rename to prover/crates/lib/prover_fri_types/README.md diff --git a/prover/prover_fri_types/src/keys.rs b/prover/crates/lib/prover_fri_types/src/keys.rs similarity index 100% rename from prover/prover_fri_types/src/keys.rs rename to prover/crates/lib/prover_fri_types/src/keys.rs diff --git a/prover/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs similarity index 100% rename from prover/prover_fri_types/src/lib.rs rename to 
prover/crates/lib/prover_fri_types/src/lib.rs diff --git a/prover/prover_fri_types/src/queue.rs b/prover/crates/lib/prover_fri_types/src/queue.rs similarity index 100% rename from prover/prover_fri_types/src/queue.rs rename to prover/crates/lib/prover_fri_types/src/queue.rs diff --git a/prover/prover_fri_utils/Cargo.toml b/prover/crates/lib/prover_fri_utils/Cargo.toml similarity index 100% rename from prover/prover_fri_utils/Cargo.toml rename to prover/crates/lib/prover_fri_utils/Cargo.toml diff --git a/prover/prover_fri_utils/src/lib.rs b/prover/crates/lib/prover_fri_utils/src/lib.rs similarity index 100% rename from prover/prover_fri_utils/src/lib.rs rename to prover/crates/lib/prover_fri_utils/src/lib.rs diff --git a/prover/prover_fri_utils/src/metrics.rs b/prover/crates/lib/prover_fri_utils/src/metrics.rs similarity index 100% rename from prover/prover_fri_utils/src/metrics.rs rename to prover/crates/lib/prover_fri_utils/src/metrics.rs diff --git a/prover/prover_fri_utils/src/region_fetcher.rs b/prover/crates/lib/prover_fri_utils/src/region_fetcher.rs similarity index 100% rename from prover/prover_fri_utils/src/region_fetcher.rs rename to prover/crates/lib/prover_fri_utils/src/region_fetcher.rs diff --git a/prover/prover_fri_utils/src/socket_utils.rs b/prover/crates/lib/prover_fri_utils/src/socket_utils.rs similarity index 100% rename from prover/prover_fri_utils/src/socket_utils.rs rename to prover/crates/lib/prover_fri_utils/src/socket_utils.rs diff --git a/zk_toolbox/crates/zk_inception/src/consts.rs b/zk_toolbox/crates/zk_inception/src/consts.rs index e0258fb46402..d9b61d49185a 100644 --- a/zk_toolbox/crates/zk_inception/src/consts.rs +++ b/zk_toolbox/crates/zk_inception/src/consts.rs @@ -2,7 +2,7 @@ pub const AMOUNT_FOR_DISTRIBUTION_TO_WALLETS: u128 = 1000000000000000000000; pub const MINIMUM_BALANCE_FOR_WALLET: u128 = 5000000000000000000; pub const SERVER_MIGRATIONS: &str = "core/lib/dal/migrations"; -pub const PROVER_MIGRATIONS: &str = 
"prover/prover_dal/migrations"; +pub const PROVER_MIGRATIONS: &str = "prover/crates/lib/prover_dal/migrations"; pub const PROVER_STORE_MAX_RETRIES: u16 = 10; pub const DEFAULT_CREDENTIALS_FILE: &str = "~/.config/gcloud/application_default_credentials.json"; pub const DEFAULT_PROOF_STORE_DIR: &str = "artifacts"; diff --git a/zk_toolbox/crates/zk_supervisor/src/dals.rs b/zk_toolbox/crates/zk_supervisor/src/dals.rs index ae8815c96899..854a6b979494 100644 --- a/zk_toolbox/crates/zk_supervisor/src/dals.rs +++ b/zk_toolbox/crates/zk_supervisor/src/dals.rs @@ -7,7 +7,7 @@ use xshell::Shell; use crate::messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_PROVER_URL_MUST_BE_PRESENTED}; const CORE_DAL_PATH: &str = "core/lib/dal"; -const PROVER_DAL_PATH: &str = "prover/prover_dal"; +const PROVER_DAL_PATH: &str = "prover/crates/lib/prover_dal"; #[derive(Debug, Clone)] pub struct SelectedDals { From 761bda19844fb3935f8a57c47df39010f88ef9dc Mon Sep 17 00:00:00 2001 From: Agustin Aon <21188659+aon@users.noreply.github.com> Date: Mon, 22 Jul 2024 10:25:16 -0300 Subject: [PATCH 09/52] feat: add state override for gas estimates (#1358) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Adds state override for gas estimates ## Why ❔ - Solves #947 - Feature parity with geth https://github.com/ethereum/go-ethereum/issues/27800 ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. - [x] Spellcheck has been run via `zk spellcheck`. - [x] Linkcheck has been run via `zk linkcheck`. 
--------- Co-authored-by: Juan Rigada <62958725+Jrigada@users.noreply.github.com> Co-authored-by: Jrigada Co-authored-by: Danil --- core/lib/state/src/lib.rs | 9 + core/lib/state/src/storage_overrides.rs | 150 +++++++++++ core/lib/state/src/storage_view.rs | 10 +- core/lib/types/src/api/mod.rs | 1 + core/lib/types/src/api/state_override.rs | 70 +++++ core/lib/types/src/transaction_request.rs | 4 +- core/lib/vm_utils/src/lib.rs | 9 +- core/lib/web3_decl/src/namespaces/eth.rs | 19 +- core/lib/web3_decl/src/namespaces/zks.rs | 16 +- .../api_server/src/execution_sandbox/apply.rs | 35 ++- .../src/execution_sandbox/execute.rs | 5 + .../api_server/src/execution_sandbox/tests.rs | 1 + .../src/execution_sandbox/validate.rs | 1 + core/node/api_server/src/tx_sender/mod.rs | 40 ++- .../web3/backend_jsonrpsee/namespaces/eth.rs | 22 +- .../web3/backend_jsonrpsee/namespaces/zks.rs | 21 +- .../api_server/src/web3/namespaces/debug.rs | 1 + .../api_server/src/web3/namespaces/eth.rs | 15 +- .../api_server/src/web3/namespaces/zks.rs | 28 +- core/node/api_server/src/web3/tests/vm.rs | 134 +++++++++- .../src/sdk/operations/deploy_contract.rs | 2 +- .../src/sdk/operations/execute_contract.rs | 2 +- .../loadnext/src/sdk/operations/transfer.rs | 2 +- core/tests/loadnext/src/sdk/wallet.rs | 2 +- .../state-override/StateOverrideTest.sol | 28 ++ .../ts-integration/tests/api/web3.test.ts | 251 +++++++++++++++++- 26 files changed, 803 insertions(+), 75 deletions(-) create mode 100644 core/lib/state/src/storage_overrides.rs create mode 100644 core/lib/types/src/api/state_override.rs create mode 100644 core/tests/ts-integration/contracts/state-override/StateOverrideTest.sol diff --git a/core/lib/state/src/lib.rs b/core/lib/state/src/lib.rs index 66577841fd45..74c60e4a3695 100644 --- a/core/lib/state/src/lib.rs +++ b/core/lib/state/src/lib.rs @@ -12,6 +12,7 @@ use std::{cell::RefCell, collections::HashMap, fmt, rc::Rc}; use zksync_types::{ + api::state_override::StateOverride, get_known_code_key, 
storage::{StorageKey, StorageValue}, H256, @@ -29,6 +30,7 @@ pub use self::{ }, shadow_storage::ShadowStorage, storage_factory::{BatchDiff, PgOrRocksdbStorage, ReadStorageFactory, RocksdbWithMemory}, + storage_overrides::StorageOverrides, storage_view::{StorageView, StorageViewCache, StorageViewMetrics}, witness::WitnessStorage, }; @@ -40,6 +42,7 @@ mod postgres; mod rocksdb; mod shadow_storage; mod storage_factory; +mod storage_overrides; mod storage_view; #[cfg(test)] mod test_utils; @@ -89,3 +92,9 @@ pub trait WriteStorage: ReadStorage { /// Smart pointer to [`WriteStorage`]. pub type StoragePtr = Rc>; + +/// Functionality to override the storage state. +pub trait OverrideStorage { + /// Apply state override to the storage. + fn apply_state_override(&mut self, overrides: &StateOverride); +} diff --git a/core/lib/state/src/storage_overrides.rs b/core/lib/state/src/storage_overrides.rs new file mode 100644 index 000000000000..f45dd6d3382f --- /dev/null +++ b/core/lib/state/src/storage_overrides.rs @@ -0,0 +1,150 @@ +use std::{cell::RefCell, collections::HashMap, fmt, rc::Rc}; + +use zksync_types::{ + api::state_override::{OverrideState, StateOverride}, + get_code_key, get_nonce_key, + utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, + AccountTreeId, StorageKey, StorageValue, H256, U256, +}; +use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; + +use crate::{OverrideStorage, ReadStorage}; + +/// A storage view that allows to override some of the storage values. +#[derive(Debug)] +pub struct StorageOverrides { + storage_handle: S, + overridden_factory_deps: HashMap>, + overridden_account_state: HashMap>, + overridden_account_state_diff: HashMap>, + overridden_balance: HashMap, + overridden_nonce: HashMap, + overridden_code: HashMap, +} + +impl StorageOverrides { + /// Creates a new storage view based on the underlying storage. 
+ pub fn new(storage: S) -> Self { + Self { + storage_handle: storage, + overridden_factory_deps: HashMap::new(), + overridden_account_state: HashMap::new(), + overridden_account_state_diff: HashMap::new(), + overridden_balance: HashMap::new(), + overridden_nonce: HashMap::new(), + overridden_code: HashMap::new(), + } + } + + /// Overrides a factory dependency code. + pub fn store_factory_dep(&mut self, hash: H256, code: Vec) { + self.overridden_factory_deps.insert(hash, code); + } + + /// Overrides an account entire state. + pub fn override_account_state(&mut self, account: AccountTreeId, state: HashMap) { + self.overridden_account_state.insert(account, state); + } + + /// Overrides an account state diff. + pub fn override_account_state_diff( + &mut self, + account: AccountTreeId, + state_diff: HashMap, + ) { + self.overridden_account_state_diff + .insert(account, state_diff); + } + + /// Make a Rc RefCell ptr to the storage + pub fn to_rc_ptr(self) -> Rc> { + Rc::new(RefCell::new(self)) + } +} + +impl ReadStorage for StorageOverrides { + fn read_value(&mut self, key: &StorageKey) -> StorageValue { + if let Some(balance) = self.overridden_balance.get(key) { + return u256_to_h256(*balance); + } + if let Some(code) = self.overridden_code.get(key) { + return *code; + } + + if let Some(nonce) = self.overridden_nonce.get(key) { + return u256_to_h256(*nonce); + } + + if let Some(account_state) = self.overridden_account_state.get(key.account()) { + if let Some(value) = account_state.get(key.key()) { + return *value; + } + return H256::zero(); + } + + if let Some(account_state_diff) = self.overridden_account_state_diff.get(key.account()) { + if let Some(value) = account_state_diff.get(key.key()) { + return *value; + } + } + + self.storage_handle.read_value(key) + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + self.storage_handle.is_write_initial(key) + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + self.overridden_factory_deps + 
.get(&hash) + .cloned() + .or_else(|| self.storage_handle.load_factory_dep(hash)) + } + + fn get_enumeration_index(&mut self, key: &StorageKey) -> Option { + self.storage_handle.get_enumeration_index(key) + } +} + +impl OverrideStorage for StorageOverrides { + fn apply_state_override(&mut self, state_override: &StateOverride) { + for (account, overrides) in state_override.iter() { + if let Some(balance) = overrides.balance { + let balance_key = storage_key_for_eth_balance(account); + self.overridden_balance.insert(balance_key, balance); + } + + if let Some(nonce) = overrides.nonce { + let nonce_key = get_nonce_key(account); + let full_nonce = self.read_value(&nonce_key); + let (_, deployment_nonce) = decompose_full_nonce(h256_to_u256(full_nonce)); + let new_full_nonce = nonces_to_full_nonce(nonce, deployment_nonce); + self.overridden_nonce.insert(nonce_key, new_full_nonce); + } + + if let Some(code) = &overrides.code { + let code_key = get_code_key(account); + let code_hash = hash_bytecode(&code.0); + self.overridden_code.insert(code_key, code_hash); + self.store_factory_dep(code_hash, code.0.clone()); + } + + match &overrides.state { + Some(OverrideState::State(state)) => { + self.override_account_state(AccountTreeId::new(*account), state.clone()); + } + Some(OverrideState::StateDiff(state_diff)) => { + for (key, value) in state_diff { + let account_state = self + .overridden_account_state_diff + .entry(AccountTreeId::new(*account)) + .or_default(); + account_state.insert(*key, *value); + } + } + None => {} + } + } + } +} diff --git a/core/lib/state/src/storage_view.rs b/core/lib/state/src/storage_view.rs index 7dcfda2ba406..4d79298101f4 100644 --- a/core/lib/state/src/storage_view.rs +++ b/core/lib/state/src/storage_view.rs @@ -6,9 +6,9 @@ use std::{ time::{Duration, Instant}, }; -use zksync_types::{StorageKey, StorageValue, H256}; +use zksync_types::{api::state_override::StateOverride, StorageKey, StorageValue, H256}; -use crate::{ReadStorage, WriteStorage}; 
+use crate::{OverrideStorage, ReadStorage, WriteStorage}; /// Metrics for [`StorageView`]. #[derive(Debug, Default, Clone, Copy)] @@ -224,6 +224,12 @@ impl WriteStorage for StorageView { } } +impl OverrideStorage for StorageView { + fn apply_state_override(&mut self, state_override: &StateOverride) { + self.storage_handle.apply_state_override(state_override); + } +} + #[cfg(test)] mod test { use zksync_types::{AccountTreeId, Address, H256}; diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index a0039ba05672..751de9bd7040 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -18,6 +18,7 @@ use crate::{ }; pub mod en; +pub mod state_override; /// Block Number #[derive(Copy, Clone, Debug, PartialEq, Display)] diff --git a/core/lib/types/src/api/state_override.rs b/core/lib/types/src/api/state_override.rs new file mode 100644 index 000000000000..5c2395ae4bf2 --- /dev/null +++ b/core/lib/types/src/api/state_override.rs @@ -0,0 +1,70 @@ +use std::{collections::HashMap, ops::Deref}; + +use serde::{Deserialize, Deserializer, Serialize}; +use zksync_basic_types::{web3::Bytes, H256, U256}; + +use crate::Address; + +/// Collection of overridden accounts +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct StateOverride(HashMap); + +/// Account override for `eth_estimateGas`. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverrideAccount { + pub balance: Option, + pub nonce: Option, + pub code: Option, + #[serde(flatten, deserialize_with = "state_deserializer")] + pub state: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum OverrideState { + State(HashMap), + StateDiff(HashMap), +} + +fn state_deserializer<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let val = serde_json::Value::deserialize(deserializer)?; + let state: Option> = match val.get("state") { + Some(val) => serde_json::from_value(val.clone()).map_err(serde::de::Error::custom)?, + None => None, + }; + let state_diff: Option> = match val.get("stateDiff") { + Some(val) => serde_json::from_value(val.clone()).map_err(serde::de::Error::custom)?, + None => None, + }; + + match (state, state_diff) { + (Some(state), None) => Ok(Some(OverrideState::State(state))), + (None, Some(state_diff)) => Ok(Some(OverrideState::StateDiff(state_diff))), + (None, None) => Ok(None), + _ => Err(serde::de::Error::custom( + "Both 'state' and 'stateDiff' cannot be set simultaneously", + )), + } +} + +impl StateOverride { + pub fn new(state: HashMap) -> Self { + Self(state) + } + + pub fn get(&self, address: &Address) -> Option<&OverrideAccount> { + self.0.get(address) + } +} + +impl Deref for StateOverride { + type Target = HashMap; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} diff --git a/core/lib/types/src/transaction_request.rs b/core/lib/types/src/transaction_request.rs index a59b21409cd1..887dfcbff378 100644 --- a/core/lib/types/src/transaction_request.rs +++ b/core/lib/types/src/transaction_request.rs @@ -400,7 +400,9 @@ impl TransactionRequest { } // returns packed eth signature if it is present - fn get_packed_signature(&self) -> Result { + pub fn get_packed_signature( + &self, + ) -> Result { let packed_v = self .v 
.ok_or(SerializationTransactionError::IncompleteSignature)? diff --git a/core/lib/vm_utils/src/lib.rs b/core/lib/vm_utils/src/lib.rs index 9cec0e13be8b..b970d1a8c6b3 100644 --- a/core/lib/vm_utils/src/lib.rs +++ b/core/lib/vm_utils/src/lib.rs @@ -8,14 +8,14 @@ use zksync_multivm::{ vm_latest::HistoryEnabled, VmInstance, }; -use zksync_state::{PostgresStorage, StoragePtr, StorageView, WriteStorage}; +use zksync_state::{PostgresStorage, StorageOverrides, StoragePtr, StorageView, WriteStorage}; use zksync_types::{L1BatchNumber, L2ChainId, Transaction}; use crate::storage::L1BatchParamsProvider; pub type VmAndStorage<'a> = ( - VmInstance>, HistoryEnabled>, - StoragePtr>>, + VmInstance>>, HistoryEnabled>, + StoragePtr>>>, ); pub fn create_vm( @@ -52,7 +52,8 @@ pub fn create_vm( let storage_l2_block_number = first_l2_block_in_batch.number() - 1; let pg_storage = PostgresStorage::new(rt_handle.clone(), connection, storage_l2_block_number, true); - let storage_view = StorageView::new(pg_storage).to_rc_ptr(); + let storage_overrides = StorageOverrides::new(pg_storage); + let storage_view = StorageView::new(storage_overrides).to_rc_ptr(); let vm = VmInstance::new(l1_batch_env, system_env, storage_view.clone()); Ok((vm, storage_view)) diff --git a/core/lib/web3_decl/src/namespaces/eth.rs b/core/lib/web3_decl/src/namespaces/eth.rs index b0e311d339bc..10443443958b 100644 --- a/core/lib/web3_decl/src/namespaces/eth.rs +++ b/core/lib/web3_decl/src/namespaces/eth.rs @@ -2,7 +2,10 @@ use jsonrpsee::core::RpcResult; use jsonrpsee::proc_macros::rpc; use zksync_types::{ - api::{BlockId, BlockIdVariant, BlockNumber, Transaction, TransactionVariant}, + api::{ + state_override::StateOverride, BlockId, BlockIdVariant, BlockNumber, Transaction, + TransactionVariant, + }, transaction_request::CallRequest, Address, H256, }; @@ -31,10 +34,20 @@ pub trait EthNamespace { async fn chain_id(&self) -> RpcResult; #[method(name = "call")] - async fn call(&self, req: CallRequest, block: Option) -> 
RpcResult; + async fn call( + &self, + req: CallRequest, + block: Option, + state_override: Option, + ) -> RpcResult; #[method(name = "estimateGas")] - async fn estimate_gas(&self, req: CallRequest, _block: Option) -> RpcResult; + async fn estimate_gas( + &self, + req: CallRequest, + _block: Option, + state_override: Option, + ) -> RpcResult; #[method(name = "gasPrice")] async fn gas_price(&self) -> RpcResult; diff --git a/core/lib/web3_decl/src/namespaces/zks.rs b/core/lib/web3_decl/src/namespaces/zks.rs index b6861a9d2dd7..6f443dbded6a 100644 --- a/core/lib/web3_decl/src/namespaces/zks.rs +++ b/core/lib/web3_decl/src/namespaces/zks.rs @@ -5,8 +5,8 @@ use jsonrpsee::core::RpcResult; use jsonrpsee::proc_macros::rpc; use zksync_types::{ api::{ - BlockDetails, BridgeAddresses, L1BatchDetails, L2ToL1LogProof, Proof, ProtocolVersion, - TransactionDetailedResult, TransactionDetails, + state_override::StateOverride, BlockDetails, BridgeAddresses, L1BatchDetails, + L2ToL1LogProof, Proof, ProtocolVersion, TransactionDetailedResult, TransactionDetails, }, fee::Fee, fee_model::{FeeParams, PubdataIndependentBatchFeeModelInput}, @@ -29,10 +29,18 @@ use crate::{ )] pub trait ZksNamespace { #[method(name = "estimateFee")] - async fn estimate_fee(&self, req: CallRequest) -> RpcResult; + async fn estimate_fee( + &self, + req: CallRequest, + state_override: Option, + ) -> RpcResult; #[method(name = "estimateGasL1ToL2")] - async fn estimate_gas_l1_to_l2(&self, req: CallRequest) -> RpcResult; + async fn estimate_gas_l1_to_l2( + &self, + req: CallRequest, + state_override: Option, + ) -> RpcResult; #[method(name = "getBridgehubContract")] async fn get_bridgehub_contract(&self) -> RpcResult>; diff --git a/core/node/api_server/src/execution_sandbox/apply.rs b/core/node/api_server/src/execution_sandbox/apply.rs index 0d607311a445..c30e5bc36c86 100644 --- a/core/node/api_server/src/execution_sandbox/apply.rs +++ b/core/node/api_server/src/execution_sandbox/apply.rs @@ -17,13 +17,16 @@ use 
zksync_multivm::{ vm_latest::{constants::BATCH_COMPUTATIONAL_GAS_LIMIT, HistoryDisabled}, VmInstance, }; -use zksync_state::{PostgresStorage, ReadStorage, StoragePtr, StorageView, WriteStorage}; +use zksync_state::{ + OverrideStorage, PostgresStorage, ReadStorage, StorageOverrides, StoragePtr, StorageView, + WriteStorage, +}; use zksync_system_constants::{ SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, ZKPORTER_IS_AVAILABLE, }; use zksync_types::{ - api, + api::{self, state_override::StateOverride}, block::{pack_block_info, unpack_block_info, L2BlockHasher}, fee_model::BatchFeeInput, get_nonce_key, @@ -38,7 +41,8 @@ use super::{ BlockArgs, TxExecutionArgs, TxSharedArgs, VmPermit, }; -type BoxedVm<'a> = Box>, HistoryDisabled>>; +type BoxedVm<'a> = + Box>>, HistoryDisabled>>; #[derive(Debug)] struct Sandbox<'a> { @@ -46,7 +50,7 @@ struct Sandbox<'a> { l1_batch_env: L1BatchEnv, execution_args: &'a TxExecutionArgs, l2_block_info_to_reset: Option, - storage_view: StorageView>, + storage_view: StorageView>>, } impl<'a> Sandbox<'a> { @@ -90,7 +94,9 @@ impl<'a> Sandbox<'a> { .context("cannot create `PostgresStorage`")? 
.with_caches(shared_args.caches.clone()); - let storage_view = StorageView::new(storage); + let storage_overrides = StorageOverrides::new(storage); + + let storage_view = StorageView::new(storage_overrides); let (system_env, l1_batch_env) = Self::prepare_env( shared_args, execution_args, @@ -259,7 +265,16 @@ impl<'a> Sandbox<'a> { mut self, tx: &Transaction, adjust_pubdata_price: bool, - ) -> (BoxedVm<'a>, StoragePtr>>) { + state_override: Option, + ) -> ( + BoxedVm<'a>, + StoragePtr>>>, + ) { + // Apply state override + if let Some(state_override) = state_override { + // Apply the state override + self.storage_view.apply_state_override(&state_override); + } self.setup_storage_view(tx); let protocol_version = self.system_env.version; if adjust_pubdata_price { @@ -294,9 +309,10 @@ pub(super) fn apply_vm_in_sandbox( execution_args: &TxExecutionArgs, connection_pool: &ConnectionPool, tx: Transaction, - block_args: BlockArgs, + block_args: BlockArgs, // Block arguments for the transaction. + state_override: Option, apply: impl FnOnce( - &mut VmInstance>, HistoryDisabled>, + &mut VmInstance>>, HistoryDisabled>, Transaction, ProtocolVersionId, ) -> T, @@ -321,7 +337,7 @@ pub(super) fn apply_vm_in_sandbox( block_args, ))?; let protocol_version = sandbox.system_env.version; - let (mut vm, storage_view) = sandbox.into_vm(&tx, adjust_pubdata_price); + let (mut vm, storage_view) = sandbox.into_vm(&tx, adjust_pubdata_price, state_override); SANDBOX_METRICS.sandbox[&SandboxStage::Initialization].observe(stage_started_at.elapsed()); span.exit(); @@ -331,6 +347,7 @@ pub(super) fn apply_vm_in_sandbox( tx.initiator_account(), tx.nonce().unwrap_or(Nonce(0)) ); + let execution_latency = SANDBOX_METRICS.sandbox[&SandboxStage::Execution].start(); let result = apply(&mut vm, tx, protocol_version); let vm_execution_took = execution_latency.observe(); diff --git a/core/node/api_server/src/execution_sandbox/execute.rs b/core/node/api_server/src/execution_sandbox/execute.rs index 
d15cf7a91439..f633b133ab00 100644 --- a/core/node/api_server/src/execution_sandbox/execute.rs +++ b/core/node/api_server/src/execution_sandbox/execute.rs @@ -17,6 +17,7 @@ use super::{ apply, testonly::MockTransactionExecutor, vm_metrics, ApiTracer, BlockArgs, TxSharedArgs, VmPermit, }; +use crate::execution_sandbox::api::state_override::StateOverride; #[derive(Debug)] pub(crate) struct TxExecutionArgs { @@ -111,6 +112,7 @@ impl TransactionExecutor { connection_pool: ConnectionPool, tx: Transaction, block_args: BlockArgs, + state_override: Option, custom_tracers: Vec, ) -> anyhow::Result { if let Self::Mock(mock_executor) = self { @@ -129,6 +131,7 @@ impl TransactionExecutor { &connection_pool, tx, block_args, + state_override, |vm, tx, _| { let storage_invocation_tracer = StorageInvocations::new(execution_args.missed_storage_invocation_limit); @@ -170,6 +173,7 @@ impl TransactionExecutor { block_args: BlockArgs, vm_execution_cache_misses_limit: Option, custom_tracers: Vec, + state_override: Option, ) -> anyhow::Result { let execution_args = TxExecutionArgs::for_eth_call( call_overrides.enforced_base_fee, @@ -189,6 +193,7 @@ impl TransactionExecutor { connection_pool, tx.into(), block_args, + state_override, custom_tracers, ) .await?; diff --git a/core/node/api_server/src/execution_sandbox/tests.rs b/core/node/api_server/src/execution_sandbox/tests.rs index e479066cacc2..0a8af35597b3 100644 --- a/core/node/api_server/src/execution_sandbox/tests.rs +++ b/core/node/api_server/src/execution_sandbox/tests.rs @@ -195,6 +195,7 @@ async fn test_instantiating_vm(pool: ConnectionPool, block_args: BlockArgs &pool, transaction.clone(), block_args, + None, |_, received_tx, _| { assert_eq!(received_tx, transaction); }, diff --git a/core/node/api_server/src/execution_sandbox/validate.rs b/core/node/api_server/src/execution_sandbox/validate.rs index 958fbc8a0742..5e958cada66e 100644 --- a/core/node/api_server/src/execution_sandbox/validate.rs +++ 
b/core/node/api_server/src/execution_sandbox/validate.rs @@ -72,6 +72,7 @@ impl TransactionExecutor { &connection_pool, tx, block_args, + None, |vm, tx, protocol_version| { let stage_latency = SANDBOX_METRICS.sandbox[&SandboxStage::Validation].start(); let span = tracing::debug_span!("validation").entered(); diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index 50b0be541bf0..15f9271d6428 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -24,6 +24,7 @@ use zksync_state_keeper::{ SequencerSealer, }; use zksync_types::{ + api::state_override::StateOverride, fee::{Fee, TransactionExecutionMetrics}, fee_model::BatchFeeInput, get_code_key, get_intrinsic_constants, @@ -385,6 +386,7 @@ impl TxSender { self.0.replica_connection_pool.clone(), tx.clone().into(), block_args, + None, vec![], ) .await?; @@ -656,6 +658,7 @@ impl TxSender { block_args: BlockArgs, base_fee: u64, vm_version: VmVersion, + state_override: Option, ) -> anyhow::Result<(VmExecutionResultAndLogs, TransactionExecutionMetrics)> { let gas_limit_with_overhead = tx_gas_limit + derive_overhead( @@ -703,6 +706,7 @@ impl TxSender { self.0.replica_connection_pool.clone(), tx.clone(), block_args, + state_override, vec![], ) .await?; @@ -733,6 +737,7 @@ impl TxSender { mut tx: Transaction, estimated_fee_scale_factor: f64, acceptable_overestimation: u64, + state_override: Option, ) -> Result { let estimation_started_at = Instant::now(); @@ -786,17 +791,25 @@ impl TxSender { ) })?; - if !tx.is_l1() - && account_code_hash == H256::zero() - && tx.execute.value > self.get_balance(&tx.initiator_account()).await? - { - tracing::info!( - "fee estimation failed on validation step. 
- account: {} does not have enough funds for for transferring tx.value: {}.", - &tx.initiator_account(), - tx.execute.value - ); - return Err(SubmitTxError::InsufficientFundsForTransfer); + if !tx.is_l1() && account_code_hash == H256::zero() { + let balance = match state_override + .as_ref() + .and_then(|overrides| overrides.get(&tx.initiator_account())) + .and_then(|account| account.balance) + { + Some(balance) => balance.to_owned(), + None => self.get_balance(&tx.initiator_account()).await?, + }; + + if tx.execute.value > balance { + tracing::info!( + "fee estimation failed on validation step. + account: {} does not have enough funds for for transferring tx.value: {}.", + &tx.initiator_account(), + tx.execute.value + ); + return Err(SubmitTxError::InsufficientFundsForTransfer); + } } // For L2 transactions we need a properly formatted signature @@ -836,6 +849,7 @@ impl TxSender { block_args, base_fee, protocol_version.into(), + state_override.clone(), ) .await .context("estimate_gas step failed")?; @@ -871,6 +885,7 @@ impl TxSender { block_args, base_fee, protocol_version.into(), + state_override.clone(), ) .await .context("estimate_gas step failed")?; @@ -903,6 +918,7 @@ impl TxSender { block_args, base_fee, protocol_version.into(), + state_override, ) .await .context("final estimate_gas step failed")?; @@ -973,6 +989,7 @@ impl TxSender { block_args: BlockArgs, call_overrides: CallOverrides, tx: L2Tx, + state_override: Option, ) -> Result, SubmitTxError> { let vm_permit = self.0.vm_concurrency_limiter.acquire().await; let vm_permit = vm_permit.ok_or(SubmitTxError::ServerShuttingDown)?; @@ -989,6 +1006,7 @@ impl TxSender { block_args, vm_execution_cache_misses_limit, vec![], + state_override, ) .await? 
.into_api_call_result() diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs index c4a16b132421..ff8ce0356a05 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs @@ -1,7 +1,7 @@ use zksync_types::{ api::{ - Block, BlockId, BlockIdVariant, BlockNumber, Log, Transaction, TransactionId, - TransactionReceipt, TransactionVariant, + state_override::StateOverride, Block, BlockId, BlockIdVariant, BlockNumber, Log, + Transaction, TransactionId, TransactionReceipt, TransactionVariant, }, transaction_request::CallRequest, web3::{Bytes, FeeHistory, Index, SyncState}, @@ -27,14 +27,24 @@ impl EthNamespaceServer for EthNamespace { Ok(self.chain_id_impl()) } - async fn call(&self, req: CallRequest, block: Option) -> RpcResult { - self.call_impl(req, block.map(Into::into)) + async fn call( + &self, + req: CallRequest, + block: Option, + state_override: Option, + ) -> RpcResult { + self.call_impl(req, block.map(Into::into), state_override) .await .map_err(|err| self.current_method().map_err(err)) } - async fn estimate_gas(&self, req: CallRequest, block: Option) -> RpcResult { - self.estimate_gas_impl(req, block) + async fn estimate_gas( + &self, + req: CallRequest, + block: Option, + state_override: Option, + ) -> RpcResult { + self.estimate_gas_impl(req, block, state_override) .await .map_err(|err| self.current_method().map_err(err)) } diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/zks.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/zks.rs index 45cb312dde6e..16bbde13509f 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/zks.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/zks.rs @@ -3,8 +3,9 @@ use std::collections::HashMap; use itertools::Itertools; use zksync_types::{ api::{ - ApiStorageLog, BlockDetails, 
BridgeAddresses, L1BatchDetails, L2ToL1LogProof, Log, Proof, - ProtocolVersion, TransactionDetailedResult, TransactionDetails, + state_override::StateOverride, ApiStorageLog, BlockDetails, BridgeAddresses, + L1BatchDetails, L2ToL1LogProof, Log, Proof, ProtocolVersion, TransactionDetailedResult, + TransactionDetails, }, fee::Fee, fee_model::{FeeParams, PubdataIndependentBatchFeeModelInput}, @@ -22,14 +23,22 @@ use crate::web3::ZksNamespace; #[async_trait] impl ZksNamespaceServer for ZksNamespace { - async fn estimate_fee(&self, req: CallRequest) -> RpcResult { - self.estimate_fee_impl(req) + async fn estimate_fee( + &self, + req: CallRequest, + state_override: Option, + ) -> RpcResult { + self.estimate_fee_impl(req, state_override) .await .map_err(|err| self.current_method().map_err(err)) } - async fn estimate_gas_l1_to_l2(&self, req: CallRequest) -> RpcResult { - self.estimate_l1_to_l2_gas_impl(req) + async fn estimate_gas_l1_to_l2( + &self, + req: CallRequest, + state_override: Option, + ) -> RpcResult { + self.estimate_l1_to_l2_gas_impl(req, state_override) .await .map_err(|err| self.current_method().map_err(err)) } diff --git a/core/node/api_server/src/web3/namespaces/debug.rs b/core/node/api_server/src/web3/namespaces/debug.rs index a2e6e2782ac5..2f2d1d44cba1 100644 --- a/core/node/api_server/src/web3/namespaces/debug.rs +++ b/core/node/api_server/src/web3/namespaces/debug.rs @@ -197,6 +197,7 @@ impl DebugNamespace { block_args, self.sender_config().vm_execution_cache_misses_limit, custom_tracers, + None, ) .await?; diff --git a/core/node/api_server/src/web3/namespaces/eth.rs b/core/node/api_server/src/web3/namespaces/eth.rs index 7b4710d1cd4a..68030763fd60 100644 --- a/core/node/api_server/src/web3/namespaces/eth.rs +++ b/core/node/api_server/src/web3/namespaces/eth.rs @@ -3,8 +3,8 @@ use zksync_dal::{CoreDal, DalError}; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; use zksync_types::{ api::{ - BlockId, BlockNumber, GetLogsFilter, 
Transaction, TransactionId, TransactionReceipt, - TransactionVariant, + state_override::StateOverride, BlockId, BlockNumber, GetLogsFilter, Transaction, + TransactionId, TransactionReceipt, TransactionVariant, }, l2::{L2Tx, TransactionType}, transaction_request::CallRequest, @@ -55,6 +55,7 @@ impl EthNamespace { &self, mut request: CallRequest, block_id: Option, + state_override: Option, ) -> Result { let block_id = block_id.unwrap_or(BlockId::Number(BlockNumber::Pending)); self.current_method().set_block_id(block_id); @@ -88,7 +89,7 @@ impl EthNamespace { let call_result: Vec = self .state .tx_sender - .eth_call(block_args, call_overrides, tx) + .eth_call(block_args, call_overrides, tx, state_override) .await?; Ok(call_result.into()) } @@ -97,6 +98,7 @@ impl EthNamespace { &self, request: CallRequest, _block: Option, + state_override: Option, ) -> Result { let mut request_with_gas_per_pubdata_overridden = request; self.state @@ -138,7 +140,12 @@ impl EthNamespace { let fee = self .state .tx_sender - .get_txs_fee_in_wei(tx.into(), scale_factor, acceptable_overestimation as u64) + .get_txs_fee_in_wei( + tx.into(), + scale_factor, + acceptable_overestimation as u64, + state_override, + ) .await?; Ok(fee.gas_limit) } diff --git a/core/node/api_server/src/web3/namespaces/zks.rs b/core/node/api_server/src/web3/namespaces/zks.rs index 2b3fbbcd55ca..4f88eb17e231 100644 --- a/core/node/api_server/src/web3/namespaces/zks.rs +++ b/core/node/api_server/src/web3/namespaces/zks.rs @@ -8,8 +8,8 @@ use zksync_multivm::interface::VmExecutionResultAndLogs; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; use zksync_types::{ api::{ - BlockDetails, BridgeAddresses, GetLogsFilter, L1BatchDetails, L2ToL1LogProof, Proof, - ProtocolVersion, StorageProof, TransactionDetails, + state_override::StateOverride, BlockDetails, BridgeAddresses, GetLogsFilter, + L1BatchDetails, L2ToL1LogProof, Proof, ProtocolVersion, StorageProof, TransactionDetails, }, fee::Fee, 
fee_model::{FeeParams, PubdataIndependentBatchFeeModelInput}, @@ -48,7 +48,11 @@ impl ZksNamespace { &self.state.current_method } - pub async fn estimate_fee_impl(&self, request: CallRequest) -> Result { + pub async fn estimate_fee_impl( + &self, + request: CallRequest, + state_override: Option, + ) -> Result { let mut request_with_gas_per_pubdata_overridden = request; self.state .set_nonce_for_call_request(&mut request_with_gas_per_pubdata_overridden) @@ -67,12 +71,13 @@ impl ZksNamespace { // not consider provided ones. tx.common_data.fee.max_priority_fee_per_gas = 0u64.into(); tx.common_data.fee.gas_per_pubdata_limit = U256::from(DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE); - self.estimate_fee(tx.into()).await + self.estimate_fee(tx.into(), state_override).await } pub async fn estimate_l1_to_l2_gas_impl( &self, request: CallRequest, + state_override: Option, ) -> Result { let mut request_with_gas_per_pubdata_overridden = request; // When we're estimating fee, we are trying to deduce values related to fee, so we should @@ -87,11 +92,15 @@ impl ZksNamespace { .try_into() .map_err(Web3Error::SerializationError)?; - let fee = self.estimate_fee(tx.into()).await?; + let fee = self.estimate_fee(tx.into(), state_override).await?; Ok(fee.gas_limit) } - async fn estimate_fee(&self, tx: Transaction) -> Result { + async fn estimate_fee( + &self, + tx: Transaction, + state_override: Option, + ) -> Result { let scale_factor = self.state.api_config.estimate_gas_scale_factor; let acceptable_overestimation = self.state.api_config.estimate_gas_acceptable_overestimation; @@ -99,7 +108,12 @@ impl ZksNamespace { Ok(self .state .tx_sender - .get_txs_fee_in_wei(tx, scale_factor, acceptable_overestimation as u64) + .get_txs_fee_in_wei( + tx, + scale_factor, + acceptable_overestimation as u64, + state_override, + ) .await?) 
} diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index 1bce1b732b13..61c24bcf9001 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -2,6 +2,7 @@ use std::sync::atomic::{AtomicU32, Ordering}; +use api::state_override::{OverrideAccount, StateOverride}; use itertools::Itertools; use zksync_multivm::{ interface::{ExecutionResult, VmRevertReason}, @@ -63,7 +64,9 @@ impl HttpTest for CallTest { client: &DynClient, _pool: &ConnectionPool, ) -> anyhow::Result<()> { - let call_result = client.call(Self::call_request(b"pending"), None).await?; + let call_result = client + .call(Self::call_request(b"pending"), None, None) + .await?; assert_eq!(call_result.0, b"output"); let valid_block_numbers_and_calldata = [ @@ -74,7 +77,7 @@ impl HttpTest for CallTest { for (number, calldata) in valid_block_numbers_and_calldata { let number = api::BlockIdVariant::BlockNumber(number); let call_result = client - .call(Self::call_request(calldata), Some(number)) + .call(Self::call_request(calldata), Some(number), None) .await?; assert_eq!(call_result.0, b"output"); } @@ -82,7 +85,7 @@ impl HttpTest for CallTest { let invalid_block_number = api::BlockNumber::from(100); let number = api::BlockIdVariant::BlockNumber(invalid_block_number); let error = client - .call(Self::call_request(b"100"), Some(number)) + .call(Self::call_request(b"100"), Some(number), None) .await .unwrap_err(); if let ClientError::Call(error) = error { @@ -120,7 +123,7 @@ impl HttpTest for CallTestAfterSnapshotRecovery { _pool: &ConnectionPool, ) -> anyhow::Result<()> { let call_result = client - .call(CallTest::call_request(b"pending"), None) + .call(CallTest::call_request(b"pending"), None, None) .await?; assert_eq!(call_result.0, b"output"); let pending_block_number = api::BlockIdVariant::BlockNumber(api::BlockNumber::Pending); @@ -128,6 +131,7 @@ impl HttpTest for CallTestAfterSnapshotRecovery { .call( 
CallTest::call_request(b"pending"), Some(pending_block_number), + None, ) .await?; assert_eq!(call_result.0, b"output"); @@ -137,7 +141,7 @@ impl HttpTest for CallTestAfterSnapshotRecovery { for number in pruned_block_numbers { let number = api::BlockIdVariant::BlockNumber(number.into()); let error = client - .call(CallTest::call_request(b"pruned"), Some(number)) + .call(CallTest::call_request(b"pruned"), Some(number), None) .await .unwrap_err(); assert_pruned_block_error(&error, first_local_l2_block); @@ -147,7 +151,7 @@ impl HttpTest for CallTestAfterSnapshotRecovery { for number in first_l2_block_numbers { let number = api::BlockIdVariant::BlockNumber(number); let call_result = client - .call(CallTest::call_request(b"first"), Some(number)) + .call(CallTest::call_request(b"first"), Some(number), None) .await?; assert_eq!(call_result.0, b"output"); } @@ -499,7 +503,7 @@ impl HttpTest for TraceCallTestAfterSnapshotRecovery { for number in pruned_block_numbers { let number = api::BlockIdVariant::BlockNumber(number.into()); let error = client - .call(CallTest::call_request(b"pruned"), Some(number)) + .call(CallTest::call_request(b"pruned"), Some(number), None) .await .unwrap_err(); assert_pruned_block_error(&error, first_local_l2_block); @@ -579,7 +583,7 @@ impl HttpTest for EstimateGasTest { for threshold in [10_000, 50_000, 100_000, 1_000_000] { self.gas_limit_threshold.store(threshold, Ordering::Relaxed); let output = client - .estimate_gas(l2_transaction.clone().into(), None) + .estimate_gas(l2_transaction.clone().into(), None, None) .await?; assert!( output >= U256::from(threshold), @@ -604,10 +608,15 @@ impl HttpTest for EstimateGasTest { let mut call_request = CallRequest::from(l2_transaction); call_request.from = Some(SendRawTransactionTest::private_key().address()); call_request.value = Some(1_000_000.into()); - client.estimate_gas(call_request.clone(), None).await?; + client + .estimate_gas(call_request.clone(), None, None) + .await?; call_request.value = 
Some(U256::max_value()); - let error = client.estimate_gas(call_request, None).await.unwrap_err(); + let error = client + .estimate_gas(call_request, None, None) + .await + .unwrap_err(); if let ClientError::Call(error) = error { let error_msg = error.message(); assert!( @@ -630,3 +639,108 @@ async fn estimate_gas_basics() { async fn estimate_gas_after_snapshot_recovery() { test_http_server(EstimateGasTest::new(true)).await; } + +#[derive(Debug)] +struct EstimateGasWithStateOverrideTest { + gas_limit_threshold: Arc, + snapshot_recovery: bool, +} + +impl EstimateGasWithStateOverrideTest { + fn new(snapshot_recovery: bool) -> Self { + Self { + gas_limit_threshold: Arc::default(), + snapshot_recovery, + } + } +} + +#[async_trait] +impl HttpTest for EstimateGasWithStateOverrideTest { + fn storage_initialization(&self) -> StorageInitialization { + let snapshot_recovery = self.snapshot_recovery; + SendRawTransactionTest { snapshot_recovery }.storage_initialization() + } + + fn transaction_executor(&self) -> MockTransactionExecutor { + let mut tx_executor = MockTransactionExecutor::default(); + let pending_block_number = if self.snapshot_recovery { + StorageInitialization::SNAPSHOT_RECOVERY_BLOCK + 2 + } else { + L2BlockNumber(1) + }; + let gas_limit_threshold = self.gas_limit_threshold.clone(); + tx_executor.set_call_responses(move |tx, block_args| { + assert_eq!(tx.execute.calldata(), [] as [u8; 0]); + assert_eq!(tx.nonce(), Some(Nonce(0))); + assert_eq!(block_args.resolved_block_number(), pending_block_number); + + let gas_limit_threshold = gas_limit_threshold.load(Ordering::SeqCst); + if tx.gas_limit() >= U256::from(gas_limit_threshold) { + ExecutionResult::Success { output: vec![] } + } else { + ExecutionResult::Revert { + output: VmRevertReason::VmError, + } + } + }); + tx_executor + } + + async fn test( + &self, + client: &DynClient, + _pool: &ConnectionPool, + ) -> anyhow::Result<()> { + // Transaction with balance override + let l2_transaction = 
create_l2_transaction(10, 100); + let mut call_request = CallRequest::from(l2_transaction); + call_request.from = Some(Address::random()); + call_request.value = Some(1_000_000.into()); + + let mut state_override_map = HashMap::new(); + state_override_map.insert( + call_request.from.unwrap(), + OverrideAccount { + balance: Some(U256::max_value()), + nonce: None, + code: None, + state: None, + }, + ); + let state_override = StateOverride::new(state_override_map); + + client + .estimate_gas(call_request.clone(), None, Some(state_override)) + .await?; + + // Transaction that should fail without balance override + let l2_transaction = create_l2_transaction(10, 100); + let mut call_request = CallRequest::from(l2_transaction); + call_request.from = Some(Address::random()); + call_request.value = Some(1_000_000.into()); + + let error = client + .estimate_gas(call_request.clone(), None, None) + .await + .unwrap_err(); + + if let ClientError::Call(error) = error { + let error_msg = error.message(); + assert!( + error_msg + .to_lowercase() + .contains("insufficient balance for transfer"), + "{error_msg}" + ); + } else { + panic!("Unexpected error: {error:?}"); + } + Ok(()) + } +} + +#[tokio::test] +async fn estimate_gas_with_state_override() { + test_http_server(EstimateGasWithStateOverrideTest::new(false)).await; +} diff --git a/core/tests/loadnext/src/sdk/operations/deploy_contract.rs b/core/tests/loadnext/src/sdk/operations/deploy_contract.rs index af621249ed8b..3b4c7a5eb53f 100644 --- a/core/tests/loadnext/src/sdk/operations/deploy_contract.rs +++ b/core/tests/loadnext/src/sdk/operations/deploy_contract.rs @@ -155,7 +155,7 @@ where ); self.wallet .provider - .estimate_fee(l2_tx.into()) + .estimate_fee(l2_tx.into(), None) .await .map_err(Into::into) } diff --git a/core/tests/loadnext/src/sdk/operations/execute_contract.rs b/core/tests/loadnext/src/sdk/operations/execute_contract.rs index 18b93008a73a..d5fe57c7b79f 100644 --- 
a/core/tests/loadnext/src/sdk/operations/execute_contract.rs +++ b/core/tests/loadnext/src/sdk/operations/execute_contract.rs @@ -155,7 +155,7 @@ where ); self.wallet .provider - .estimate_fee(execute.into()) + .estimate_fee(execute.into(), None) .await .map_err(Into::into) } diff --git a/core/tests/loadnext/src/sdk/operations/transfer.rs b/core/tests/loadnext/src/sdk/operations/transfer.rs index 34bab615c7c5..94ee3aeb6082 100644 --- a/core/tests/loadnext/src/sdk/operations/transfer.rs +++ b/core/tests/loadnext/src/sdk/operations/transfer.rs @@ -181,7 +181,7 @@ where }; self.wallet .provider - .estimate_fee(l2_tx.into()) + .estimate_fee(l2_tx.into(), None) .await .map_err(Into::into) } diff --git a/core/tests/loadnext/src/sdk/wallet.rs b/core/tests/loadnext/src/sdk/wallet.rs index c46431f70f48..9d3bd73a9bf2 100644 --- a/core/tests/loadnext/src/sdk/wallet.rs +++ b/core/tests/loadnext/src/sdk/wallet.rs @@ -96,7 +96,7 @@ where }; let bytes = self .provider - .call(req, Some(BlockIdVariant::BlockNumber(block_number))) + .call(req, Some(BlockIdVariant::BlockNumber(block_number)), None) .await?; if bytes.0.len() == 32 { U256::from_big_endian(&bytes.0) diff --git a/core/tests/ts-integration/contracts/state-override/StateOverrideTest.sol b/core/tests/ts-integration/contracts/state-override/StateOverrideTest.sol new file mode 100644 index 000000000000..e8d02737cc15 --- /dev/null +++ b/core/tests/ts-integration/contracts/state-override/StateOverrideTest.sol @@ -0,0 +1,28 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +pragma solidity ^0.8.0; + +contract StateOverrideTest { + uint256 public someValue; + uint256 public anotherValue; + uint256 public initialValue = 100; + + function setValue(uint256 value) public { + someValue = value; + } + + function setAnotherValue(uint256 value) public { + anotherValue = value; + } + + function increment(uint256 value) public view returns (uint256) { + require(someValue > 0, "Initial state not set"); + return someValue + 
value; + } + + function sumValues() public view returns (uint256) { + require(someValue > 0 && anotherValue > 0, "Initial state not set"); + return someValue + anotherValue + initialValue; + } +} diff --git a/core/tests/ts-integration/tests/api/web3.test.ts b/core/tests/ts-integration/tests/api/web3.test.ts index 9b334488fcb8..e78ec452b2f5 100644 --- a/core/tests/ts-integration/tests/api/web3.test.ts +++ b/core/tests/ts-integration/tests/api/web3.test.ts @@ -19,7 +19,8 @@ const contracts = { counter: getTestContract('Counter'), events: getTestContract('Emitter'), outer: getTestContract('Outer'), - inner: getTestContract('Inner') + inner: getTestContract('Inner'), + stateOverride: getTestContract('StateOverrideTest') }; describe('web3 API compatibility tests', () => { @@ -679,13 +680,20 @@ describe('web3 API compatibility tests', () => { // There are around `0.5 * maxLogsLimit` logs in [tx1Receipt.blockNumber, tx1Receipt.blockNumber] range, // so query with such filter should succeed. - await expect(alice.provider.getLogs({ fromBlock: tx1Receipt.blockNumber, toBlock: tx1Receipt.blockNumber })) - .resolves; + await expect( + alice.provider.getLogs({ + fromBlock: tx1Receipt.blockNumber, + toBlock: tx1Receipt.blockNumber + }) + ).resolves; // There are at least `1.5 * maxLogsLimit` logs in [tx1Receipt.blockNumber, tx3Receipt.blockNumber] range, // so query with such filter should fail. await expect( - alice.provider.getLogs({ fromBlock: tx1Receipt.blockNumber, toBlock: tx3Receipt.blockNumber }) + alice.provider.getLogs({ + fromBlock: tx1Receipt.blockNumber, + toBlock: tx3Receipt.blockNumber + }) ).rejects.toThrow(`Query returned more than ${maxLogsLimit} results.`); }); @@ -961,6 +969,241 @@ describe('web3 API compatibility tests', () => { expect(txFromApi.signature.v! 
=== 27 || 28); }); + describe('Storage override', () => { + test('Should be able to estimate_gas overriding the balance of the sender', async () => { + const balance = await alice.getBalance(); + const amount = balance + 1n; + + // Expect the transaction to be reverted without the overridden balance + await expect( + alice.provider.estimateGas({ + from: alice.address, + to: alice.address, + value: amount.toString() + }) + ).toBeRejected(); + + // Call estimate_gas overriding the balance of the sender using the eth_estimateGas endpoint + const response = await alice.provider.send('eth_estimateGas', [ + { + from: alice.address, + to: alice.address, + value: amount.toString() + }, + 'latest', + //override with the balance needed to send the transaction + { + [alice.address]: { + balance: amount.toString() + } + } + ]); + + // Assert that the response is successful + expect(response).toEqual(expect.stringMatching(HEX_VALUE_REGEX)); + }); + test('Should be able to estimate_gas overriding contract code', async () => { + // Deploy the first contract + const contract1 = await deployContract(alice, contracts.events, []); + const contract1Address = await contract1.getAddress(); + + // Deploy the second contract to extract the code that we are overriding the estimation with + const contract2 = await deployContract(alice, contracts.counter, []); + const contract2Address = await contract2.getAddress(); + + // Get the code of contract2 + const code = await alice.provider.getCode(contract2Address); + + // Get the calldata of the increment function of contract2 + const incrementFunctionData = contract2.interface.encodeFunctionData('increment', [1]); + + // Assert that the estimation fails because the increment function is not present in contract1 + expect( + alice.provider.estimateGas({ + to: contract1Address.toString(), + data: incrementFunctionData + }) + ).toBeRejected(); + + // Call estimate_gas overriding the code of contract1 with the code of contract2 using the 
eth_estimateGas endpoint + const response = await alice.provider.send('eth_estimateGas', [ + { + from: alice.address, + to: contract1Address.toString(), + data: incrementFunctionData + }, + 'latest', + { [contract1Address.toString()]: { code: code } } + ]); + + // Assert that the response is successful + expect(response).toEqual(expect.stringMatching(HEX_VALUE_REGEX)); + }); + + test('Should estimate gas by overriding state with State', async () => { + const contract = await deployContract(alice, contracts.stateOverride, []); + const contractAddress = await contract.getAddress(); + + const sumValuesFunctionData = contract.interface.encodeFunctionData('sumValues', []); + + // Ensure that the initial gas estimation fails due to contract requirements + await expect( + alice.provider.estimateGas({ + to: contractAddress.toString(), + data: sumValuesFunctionData + }) + ).toBeRejected(); + + // Override the entire contract state using State + const state = { + [contractAddress.toString()]: { + state: { + '0x0000000000000000000000000000000000000000000000000000000000000000': + '0x0000000000000000000000000000000000000000000000000000000000000001', + '0x0000000000000000000000000000000000000000000000000000000000000001': + '0x0000000000000000000000000000000000000000000000000000000000000002' + } + } + }; + + const response = await alice.provider.send('eth_estimateGas', [ + { + from: alice.address, + to: contractAddress.toString(), + data: sumValuesFunctionData + }, + 'latest', + state + ]); + + expect(response).toEqual(expect.stringMatching(HEX_VALUE_REGEX)); + }); + + test('Should estimate gas by overriding state with StateDiff', async () => { + const contract = await deployContract(alice, contracts.stateOverride, []); + const contractAddress = await contract.getAddress(); + const incrementFunctionData = contract.interface.encodeFunctionData('increment', [1]); + + // Ensure that the initial gas estimation fails due to contract requirements + await expect( + 
alice.provider.estimateGas({ + to: contractAddress.toString(), + data: incrementFunctionData + }) + ).toBeRejected(); + + // Override the contract state using StateDiff + const stateDiff = { + [contractAddress.toString()]: { + stateDiff: { + '0x0000000000000000000000000000000000000000000000000000000000000000': + '0x0000000000000000000000000000000000000000000000000000000000000001' + } + } + }; + + const response = await alice.provider.send('eth_estimateGas', [ + { + from: alice.address, + to: contractAddress.toString(), + data: incrementFunctionData + }, + 'latest', + stateDiff + ]); + + expect(response).toEqual(expect.stringMatching(HEX_VALUE_REGEX)); + }); + + test('Should call and succeed with overriding state with State', async () => { + const contract = await deployContract(alice, contracts.stateOverride, []); + const contractAddress = await contract.getAddress(); + const sumValuesFunctionData = contract.interface.encodeFunctionData('sumValues', []); + + // Ensure that the initial call fails due to contract requirements + await alice.provider + .call({ + to: contractAddress.toString(), + data: sumValuesFunctionData + }) + .catch((error) => { + const errorString = 'Initial state not set'; + expect(error.message).toContain(errorString); + }); + + // Override the contract state using State + const state = { + [contractAddress.toString()]: { + state: { + '0x0000000000000000000000000000000000000000000000000000000000000000': + '0x0000000000000000000000000000000000000000000000000000000000000001', + '0x0000000000000000000000000000000000000000000000000000000000000001': + '0x0000000000000000000000000000000000000000000000000000000000000002' + } + } + }; + + const response = await alice.provider.send('eth_call', [ + { + from: alice.address, + to: contractAddress.toString(), + data: sumValuesFunctionData + }, + 'latest', + state + ]); + + // The state replace the entire state of the contract, so the sum now would be + // 1 (0x1) + 2 (0x2) = 3 (0x3) + 
expect(response).toEqual('0x0000000000000000000000000000000000000000000000000000000000000003'); + }); + + test('Should call and succeed with overriding state with StateDiff', async () => { + const contract = await deployContract(alice, contracts.stateOverride, []); + const contractAddress = await contract.getAddress(); + const sumValuesFunctionData = contract.interface.encodeFunctionData('sumValues', []); + + // Ensure that the initial call fails due to contract requirements + await alice.provider + .call({ + to: contractAddress.toString(), + data: sumValuesFunctionData + }) + .catch((error) => { + const errorString = 'Initial state not set'; + expect(error.message).toContain(errorString); + }); + + // Override the contract state using State + const stateDiff = { + [contractAddress.toString()]: { + stateDiff: { + '0x0000000000000000000000000000000000000000000000000000000000000000': + '0x0000000000000000000000000000000000000000000000000000000000000001', + '0x0000000000000000000000000000000000000000000000000000000000000001': + '0x0000000000000000000000000000000000000000000000000000000000000002' + } + } + }; + + const response = await alice.provider.send('eth_call', [ + { + from: alice.address, + to: contractAddress.toString(), + data: sumValuesFunctionData + }, + 'latest', + stateDiff + ]); + + // The stateDiff only changes the specific slots provided in the override. + // The initial value of the storage slot at key 0x2 remains unchanged, which is 100 (0x64 in hex). + // Therefore, the sum of the values at the three storage slots is: + // 1 (0x1) + 2 (0x2) + 100 (0x64) = 103 (0x67 in hex). + // This is why the expected response is 0x67. 
+ expect(response).toEqual('0x0000000000000000000000000000000000000000000000000000000000000067'); + }); + }); // We want to be sure that correct(outer) contract address is return in the transaction receipt, // when there is a contract that initializa another contract in the constructor test('Should check inner-outer contract address in the receipt of the deploy tx', async () => { From 0a12cc9259b16febd6d391eec22b19b032bd4767 Mon Sep 17 00:00:00 2001 From: Yury Akudovich Date: Mon, 22 Jul 2024 18:18:46 +0200 Subject: [PATCH 10/52] ci: Remove version tag without Prover Protocol Version for all provers. (#2457) --- .github/workflows/build-prover-fri-gpu-gar.yml | 7 ------- .github/workflows/build-prover-template.yml | 6 ------ infrastructure/zk/src/docker.ts | 6 ------ 3 files changed, 19 deletions(-) diff --git a/.github/workflows/build-prover-fri-gpu-gar.yml b/.github/workflows/build-prover-fri-gpu-gar.yml index 4a83af559e50..7805f7ba565b 100644 --- a/.github/workflows/build-prover-fri-gpu-gar.yml +++ b/.github/workflows/build-prover-fri-gpu-gar.yml @@ -47,7 +47,6 @@ jobs: PROVER_IMAGE=${{ inputs.image_tag_suffix }} push: true tags: | - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.image_tag_suffix }} us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} - name: Login to Asia GAR @@ -56,9 +55,6 @@ jobs: - name: Build and push to Asia GAR run: | - docker buildx imagetools create \ - --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.image_tag_suffix }} docker buildx imagetools create \ --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ 
us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} @@ -69,9 +65,6 @@ jobs: - name: Build and push to Europe GAR run: | - docker buildx imagetools create \ - --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.image_tag_suffix }} docker buildx imagetools create \ --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} diff --git a/.github/workflows/build-prover-template.yml b/.github/workflows/build-prover-template.yml index d03ae124b176..ba76740ee2df 100644 --- a/.github/workflows/build-prover-template.yml +++ b/.github/workflows/build-prover-template.yml @@ -199,9 +199,6 @@ jobs: - name: Login and push to Asia GAR run: | gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://asia-docker.pkg.dev - docker buildx imagetools create \ - --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} docker buildx imagetools create \ --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} @@ -209,9 +206,6 @@ jobs: - name: 
Login and push to Europe GAR run: | gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev - docker buildx imagetools create \ - --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} docker buildx imagetools create \ --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} diff --git a/infrastructure/zk/src/docker.ts b/infrastructure/zk/src/docker.ts index 7f42fca1d022..19b03bcb2111 100644 --- a/infrastructure/zk/src/docker.ts +++ b/infrastructure/zk/src/docker.ts @@ -75,13 +75,7 @@ function defaultTagList(image: string, imageTagSha: string, imageTagShaTS: strin 'server-v2', 'external-node', 'contract-verifier', - 'witness-generator', - 'prover-fri', - 'prover-gpu-fri', - 'witness-vector-generator', 'prover-fri-gateway', - 'proof-fri-compressor', - 'proof-fri-gpu-compressor', 'snapshots-creator' ].includes(image) ? ['latest', 'latest2.0', `2.0-${imageTagSha}`, `${imageTagSha}`, `2.0-${imageTagShaTS}`, `${imageTagShaTS}`] From c5650a4f1747f59d7a2d4e1986a91ae3fa7d75b0 Mon Sep 17 00:00:00 2001 From: pompon0 Date: Mon, 22 Jul 2024 19:08:52 +0200 Subject: [PATCH 11/52] feat: added consensus_config to general config (#2462) Now that we have a support for file-based configuration, we can embed the consensus configuration in general configs file. 
--------- Co-authored-by: Danil --- core/bin/zksync_server/src/main.rs | 30 +-- core/bin/zksync_server/src/node_builder.rs | 9 +- core/lib/config/src/configs/general.rs | 4 +- core/lib/config/src/testonly.rs | 195 ++++++++++++++++-- core/lib/protobuf_config/src/general.rs | 2 + .../src/proto/config/general.proto | 60 +++--- core/lib/protobuf_config/src/tests.rs | 12 ++ .../src/temp_config_store/mod.rs | 1 + 8 files changed, 246 insertions(+), 67 deletions(-) diff --git a/core/bin/zksync_server/src/main.rs b/core/bin/zksync_server/src/main.rs index b589d04aed66..a59705b8e587 100644 --- a/core/bin/zksync_server/src/main.rs +++ b/core/bin/zksync_server/src/main.rs @@ -91,12 +91,25 @@ fn main() -> anyhow::Result<()> { let tmp_config = load_env_config()?; let configs = match opt.config_path { - None => tmp_config.general(), + None => { + let mut configs = tmp_config.general(); + configs.consensus_config = + config::read_consensus_config().context("read_consensus_config()")?; + configs + } Some(path) => { let yaml = std::fs::read_to_string(&path).with_context(|| path.display().to_string())?; - decode_yaml_repr::(&yaml) - .context("failed decoding general YAML config")? + let mut configs = + decode_yaml_repr::(&yaml) + .context("failed decoding general YAML config")?; + // Fallback to the consensus_config.yaml file. 
+ // TODO: remove once we move the consensus config to general config on stage + if configs.consensus_config.is_none() { + configs.consensus_config = + config::read_consensus_config().context("read_consensus_config()")?; + } + configs } }; @@ -154,8 +167,6 @@ fn main() -> anyhow::Result<()> { }, }; - let consensus = config::read_consensus_config().context("read_consensus_config()")?; - let contracts_config = match opt.contracts_config_path { None => ContractsConfig::from_env().context("contracts_config")?, Some(path) => { @@ -176,14 +187,7 @@ fn main() -> anyhow::Result<()> { } }; - let node = MainNodeBuilder::new( - configs, - wallets, - genesis, - contracts_config, - secrets, - consensus, - ); + let node = MainNodeBuilder::new(configs, wallets, genesis, contracts_config, secrets); if opt.genesis { // If genesis is requested, we don't need to run the node. diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index f8173579b57e..0eaa9b651f64 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -3,10 +3,7 @@ use anyhow::Context; use zksync_config::{ - configs::{ - consensus::ConsensusConfig, eth_sender::PubdataSendingMode, wallets::Wallets, - GeneralConfig, Secrets, - }, + configs::{eth_sender::PubdataSendingMode, wallets::Wallets, GeneralConfig, Secrets}, ContractsConfig, GenesisConfig, }; use zksync_core_leftovers::Component; @@ -86,7 +83,6 @@ pub struct MainNodeBuilder { genesis_config: GenesisConfig, contracts_config: ContractsConfig, secrets: Secrets, - consensus_config: Option, } impl MainNodeBuilder { @@ -96,7 +92,6 @@ impl MainNodeBuilder { genesis_config: GenesisConfig, contracts_config: ContractsConfig, secrets: Secrets, - consensus_config: Option, ) -> Self { Self { node: ZkStackServiceBuilder::new(), @@ -105,7 +100,6 @@ impl MainNodeBuilder { genesis_config, contracts_config, secrets, - consensus_config, } } @@ -456,6 +450,7 @@ impl MainNodeBuilder { fn 
add_consensus_layer(mut self) -> anyhow::Result { self.node.add_layer(MainNodeConsensusLayer { config: self + .configs .consensus_config .clone() .context("Consensus config has to be provided")?, diff --git a/core/lib/config/src/configs/general.rs b/core/lib/config/src/configs/general.rs index e80538b2a4b9..122d1e278553 100644 --- a/core/lib/config/src/configs/general.rs +++ b/core/lib/config/src/configs/general.rs @@ -2,6 +2,7 @@ use crate::{ configs::{ base_token_adjuster::BaseTokenAdjusterConfig, chain::{CircuitBreakerConfig, MempoolConfig, OperationsManagerConfig, StateKeeperConfig}, + consensus::ConsensusConfig, da_dispatcher::DADispatcherConfig, fri_prover_group::FriProverGroupConfig, house_keeper::HouseKeeperConfig, @@ -17,7 +18,7 @@ use crate::{ SnapshotsCreatorConfig, }; -#[derive(Debug)] +#[derive(Debug, PartialEq)] pub struct GeneralConfig { pub postgres_config: Option, pub api_config: Option, @@ -48,4 +49,5 @@ pub struct GeneralConfig { pub core_object_store: Option, pub base_token_adjuster: Option, pub external_price_api_client_config: Option, + pub consensus_config: Option, } diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index e105c3282639..f3d6b98491be 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -235,24 +235,24 @@ impl Distribution for EncodeDist { } impl Distribution for EncodeDist { - fn sample(&self, g: &mut R) -> configs::ContractsConfig { + fn sample(&self, rng: &mut R) -> configs::ContractsConfig { configs::ContractsConfig { - governance_addr: g.gen(), - verifier_addr: g.gen(), - default_upgrade_addr: g.gen(), - diamond_proxy_addr: g.gen(), - validator_timelock_addr: g.gen(), - l1_erc20_bridge_proxy_addr: g.gen(), - l2_erc20_bridge_addr: g.gen(), - l1_shared_bridge_proxy_addr: g.gen(), - l2_shared_bridge_addr: g.gen(), - l1_weth_bridge_proxy_addr: g.gen(), - l2_weth_bridge_addr: g.gen(), - l2_testnet_paymaster_addr: g.gen(), - l1_multicall3_addr: g.gen(), - 
base_token_addr: g.gen(), - chain_admin_addr: g.gen(), - ecosystem_contracts: self.sample(g), + governance_addr: rng.gen(), + verifier_addr: rng.gen(), + default_upgrade_addr: rng.gen(), + diamond_proxy_addr: rng.gen(), + validator_timelock_addr: rng.gen(), + l1_erc20_bridge_proxy_addr: rng.gen(), + l2_erc20_bridge_addr: rng.gen(), + l1_shared_bridge_proxy_addr: rng.gen(), + l2_shared_bridge_addr: rng.gen(), + l1_weth_bridge_proxy_addr: rng.gen(), + l2_weth_bridge_addr: rng.gen(), + l2_testnet_paymaster_addr: rng.gen(), + l1_multicall3_addr: rng.gen(), + base_token_addr: rng.gen(), + chain_admin_addr: rng.gen(), + ecosystem_contracts: self.sample(rng), } } } @@ -887,3 +887,164 @@ impl Distribution for EncodeDist { } } } + +impl Distribution for EncodeDist { + fn sample(&self, rng: &mut R) -> configs::da_dispatcher::DADispatcherConfig { + configs::da_dispatcher::DADispatcherConfig { + polling_interval_ms: self.sample(rng), + max_rows_to_dispatch: self.sample(rng), + max_retries: self.sample(rng), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::vm_runner::ProtectiveReadsWriterConfig { + configs::vm_runner::ProtectiveReadsWriterConfig { + db_path: self.sample(rng), + window_size: self.sample(rng), + first_processed_batch: L1BatchNumber(rng.gen()), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::vm_runner::BasicWitnessInputProducerConfig { + configs::vm_runner::BasicWitnessInputProducerConfig { + db_path: self.sample(rng), + window_size: self.sample(rng), + first_processed_batch: L1BatchNumber(rng.gen()), + } + } +} + +impl Distribution for EncodeDist { + fn sample(&self, rng: &mut R) -> configs::CommitmentGeneratorConfig { + configs::CommitmentGeneratorConfig { + max_parallelism: self.sample(rng), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::snapshot_recovery::TreeRecoveryConfig { + 
configs::snapshot_recovery::TreeRecoveryConfig { + chunk_size: self.sample(rng), + parallel_persistence_buffer: self.sample_opt(|| rng.gen()), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::snapshot_recovery::PostgresRecoveryConfig { + configs::snapshot_recovery::PostgresRecoveryConfig { + max_concurrency: self.sample_opt(|| rng.gen()), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::snapshot_recovery::SnapshotRecoveryConfig { + use configs::snapshot_recovery::{SnapshotRecoveryConfig, TreeRecoveryConfig}; + let tree: TreeRecoveryConfig = self.sample(rng); + SnapshotRecoveryConfig { + enabled: self.sample(rng), + l1_batch: self.sample_opt(|| L1BatchNumber(rng.gen())), + drop_storage_key_preimages: (tree != TreeRecoveryConfig::default()) && self.sample(rng), + tree, + postgres: self.sample(rng), + object_store: self.sample(rng), + } + } +} + +impl Distribution for EncodeDist { + fn sample(&self, rng: &mut R) -> configs::pruning::PruningConfig { + configs::pruning::PruningConfig { + enabled: self.sample(rng), + chunk_size: self.sample(rng), + removal_delay_sec: self.sample_opt(|| rng.gen()), + data_retention_sec: self.sample(rng), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::base_token_adjuster::BaseTokenAdjusterConfig { + configs::base_token_adjuster::BaseTokenAdjusterConfig { + price_polling_interval_ms: self.sample(rng), + price_cache_update_interval_ms: self.sample(rng), + } + } +} + +impl Distribution for EncodeDist { + fn sample( + &self, + rng: &mut R, + ) -> configs::external_price_api_client::ExternalPriceApiClientConfig { + configs::external_price_api_client::ExternalPriceApiClientConfig { + source: self.sample(rng), + base_url: self.sample(rng), + api_key: self.sample(rng), + client_timeout_ms: self.sample(rng), + forced_numerator: self.sample(rng), + forced_denominator: self.sample(rng), + } + 
} +} + +impl Distribution for EncodeDist { + fn sample(&self, rng: &mut R) -> configs::GeneralConfig { + configs::GeneralConfig { + postgres_config: self.sample(rng), + api_config: self.sample(rng), + contract_verifier: self.sample(rng), + circuit_breaker_config: self.sample(rng), + mempool_config: self.sample(rng), + operations_manager_config: self.sample(rng), + state_keeper_config: self.sample(rng), + house_keeper_config: self.sample(rng), + proof_compressor_config: self.sample(rng), + prover_config: self.sample(rng), + prover_gateway: self.sample(rng), + witness_vector_generator: self.sample(rng), + prover_group_config: self.sample(rng), + witness_generator: self.sample(rng), + prometheus_config: self.sample(rng), + proof_data_handler_config: self.sample(rng), + db_config: self.sample(rng), + eth: self.sample(rng), + snapshot_creator: self.sample(rng), + observability: self.sample(rng), + da_dispatcher_config: self.sample(rng), + protective_reads_writer_config: self.sample(rng), + basic_witness_input_producer_config: self.sample(rng), + commitment_generator: self.sample(rng), + snapshot_recovery: self.sample(rng), + pruning: self.sample(rng), + core_object_store: self.sample(rng), + base_token_adjuster: self.sample(rng), + external_price_api_client_config: self.sample(rng), + consensus_config: self.sample(rng), + } + } +} diff --git a/core/lib/protobuf_config/src/general.rs b/core/lib/protobuf_config/src/general.rs index 44ce9d8d1eba..31d1ea6bc1b7 100644 --- a/core/lib/protobuf_config/src/general.rs +++ b/core/lib/protobuf_config/src/general.rs @@ -56,6 +56,7 @@ impl ProtoRepr for proto::GeneralConfig { .context("snapshot_recovery")?, external_price_api_client_config: read_optional_repr(&self.external_price_api_client) .context("external_price_api_client")?, + consensus_config: read_optional_repr(&self.consensus).context("consensus")?, }) } @@ -105,6 +106,7 @@ impl ProtoRepr for proto::GeneralConfig { .external_price_api_client_config .as_ref() 
.map(ProtoRepr::build), + consensus: this.consensus_config.as_ref().map(ProtoRepr::build), } } } diff --git a/core/lib/protobuf_config/src/proto/config/general.proto b/core/lib/protobuf_config/src/proto/config/general.proto index be64f7bb97ee..37d507b9ab62 100644 --- a/core/lib/protobuf_config/src/proto/config/general.proto +++ b/core/lib/protobuf_config/src/proto/config/general.proto @@ -21,35 +21,37 @@ import "zksync/config/pruning.proto"; import "zksync/config/object_store.proto"; import "zksync/config/base_token_adjuster.proto"; import "zksync/config/external_price_api_client.proto"; +import "zksync/core/consensus.proto"; message GeneralConfig { - optional config.database.Postgres postgres = 1; - optional config.api.Api api = 2; - optional config.contract_verifier.ContractVerifier contract_verifier = 3; - optional config.circuit_breaker.CircuitBreaker circuit_breaker = 5; - optional config.chain.Mempool mempool = 6; - optional config.chain.OperationsManager operations_manager = 8; - optional config.chain.StateKeeper state_keeper = 9; - optional config.house_keeper.HouseKeeper house_keeper = 10; - optional config.prover.Prover prover = 12; - optional config.utils.Prometheus prometheus = 15; - optional config.database.DB db = 20; - optional config.eth.ETH eth = 22; - optional config.prover.WitnessGenerator witness_generator = 24; - optional config.prover.WitnessVectorGenerator witness_vector_generator = 25; - optional config.prover.ProofCompressor proof_compressor = 27; - optional config.prover.ProofDataHandler data_handler = 28; - optional config.prover.ProverGroup prover_group = 29; - optional config.prover.ProverGateway prover_gateway = 30; - optional config.snapshot_creator.SnapshotsCreator snapshot_creator = 31; - optional config.observability.Observability observability = 32; - optional config.vm_runner.ProtectiveReadsWriter protective_reads_writer = 33; - optional config.object_store.ObjectStore core_object_store = 34; - optional 
config.snapshot_recovery.SnapshotRecovery snapshot_recovery = 35; - optional config.pruning.Pruning pruning = 36; - optional config.commitment_generator.CommitmentGenerator commitment_generator = 37; - optional config.da_dispatcher.DataAvailabilityDispatcher da_dispatcher = 38; - optional config.base_token_adjuster.BaseTokenAdjuster base_token_adjuster = 39; - optional config.vm_runner.BasicWitnessInputProducer basic_witness_input_producer = 40; - optional config.external_price_api_client.ExternalPriceApiClient external_price_api_client = 41; + optional database.Postgres postgres = 1; + optional api.Api api = 2; + optional contract_verifier.ContractVerifier contract_verifier = 3; + optional circuit_breaker.CircuitBreaker circuit_breaker = 5; + optional chain.Mempool mempool = 6; + optional chain.OperationsManager operations_manager = 8; + optional chain.StateKeeper state_keeper = 9; + optional house_keeper.HouseKeeper house_keeper = 10; + optional prover.Prover prover = 12; + optional utils.Prometheus prometheus = 15; + optional database.DB db = 20; + optional eth.ETH eth = 22; + optional prover.WitnessGenerator witness_generator = 24; + optional prover.WitnessVectorGenerator witness_vector_generator = 25; + optional prover.ProofCompressor proof_compressor = 27; + optional prover.ProofDataHandler data_handler = 28; + optional prover.ProverGroup prover_group = 29; + optional prover.ProverGateway prover_gateway = 30; + optional snapshot_creator.SnapshotsCreator snapshot_creator = 31; + optional observability.Observability observability = 32; + optional vm_runner.ProtectiveReadsWriter protective_reads_writer = 33; + optional object_store.ObjectStore core_object_store = 34; + optional snapshot_recovery.SnapshotRecovery snapshot_recovery = 35; + optional pruning.Pruning pruning = 36; + optional commitment_generator.CommitmentGenerator commitment_generator = 37; + optional da_dispatcher.DataAvailabilityDispatcher da_dispatcher = 38; + optional 
base_token_adjuster.BaseTokenAdjuster base_token_adjuster = 39; + optional vm_runner.BasicWitnessInputProducer basic_witness_input_producer = 40; + optional external_price_api_client.ExternalPriceApiClient external_price_api_client = 41; + optional core.consensus.Config consensus = 42; } diff --git a/core/lib/protobuf_config/src/tests.rs b/core/lib/protobuf_config/src/tests.rs index 3cb18c5bbf6d..695f404f64d1 100644 --- a/core/lib/protobuf_config/src/tests.rs +++ b/core/lib/protobuf_config/src/tests.rs @@ -42,6 +42,18 @@ fn test_encoding() { test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); + test_encode_all_formats::>( + rng, + ); + test_encode_all_formats::>(rng); } #[test] diff --git a/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs b/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs index f1761e8ff8f9..1ad688ed14cb 100644 --- a/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs +++ b/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs @@ -111,6 +111,7 @@ impl TempConfigStore { snapshot_recovery: self.snapshot_recovery.clone(), pruning: self.pruning.clone(), external_price_api_client_config: self.external_price_api_client_config.clone(), + consensus_config: None, } } From 990676c5f84afd2ff8cd337f495c82e8d1f305a4 Mon Sep 17 00:00:00 2001 From: Artem Fomiuk <88630083+Artemka374@users.noreply.github.com> Date: Tue, 23 Jul 2024 07:32:12 +0300 Subject: [PATCH 12/52] feat: remove leftovers after BWIP (#2456) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Removed redundant columns from prover DB after adding BWIP. 
## Why ❔ Because they won't be used anymore ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- core/lib/basic_types/src/prover_dal.rs | 9 +---- .../src/proof_gen_data_fetcher.rs | 12 +----- ...5d2832571464e74b5fed92cf54617573c84ec.json | 12 ++---- ...cd21d4645563f93afd4428734196c2b212276.json | 17 +++++++++ ...01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json | 19 ---------- ...d34a5baece02812f8c950fc84d37eeebd33a4.json | 16 +++----- ...191a43dc8eafc33ee067bd41e20f25f7625f0.json | 12 ++---- ...e118cabc67b6e507efefb7b69e102f1b43c58.json | 38 +++++-------------- ..._remove_unused_columns_after_bwip.down.sql | 6 +++ ...19_remove_unused_columns_after_bwip.up.sql | 6 +++ .../lib/prover_dal/src/fri_prover_dal.rs | 1 - .../src/fri_witness_generator_dal.rs | 20 +--------- 12 files changed, 54 insertions(+), 114 deletions(-) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-929419ad8dcc70e8ce986f17075cd21d4645563f93afd4428734196c2b212276.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json create mode 100644 prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.down.sql create mode 100644 prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.up.sql diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 29d36cc91f8f..edaad3798e82 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,9 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, Eip4844Blobs}, - protocol_version::ProtocolVersionId, - L1BatchNumber, + 
basic_fri_types::AggregationRound, protocol_version::ProtocolVersionId, L1BatchNumber, }; #[derive(Debug, Clone)] @@ -255,7 +253,6 @@ pub struct ProverJobFriInfo { pub created_at: NaiveDateTime, pub updated_at: NaiveDateTime, pub time_taken: Option, - pub is_blob_cleaned: Option, pub depth: u32, pub is_node_final_proof: bool, pub proof_blob_url: Option, @@ -266,7 +263,6 @@ pub struct ProverJobFriInfo { #[derive(Debug, Clone)] pub struct BasicWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, - pub merkle_tree_paths_blob_url: Option, pub witness_inputs_blob_url: Option, pub attempts: u32, pub status: WitnessJobStatus, @@ -275,10 +271,8 @@ pub struct BasicWitnessGeneratorJobInfo { pub updated_at: NaiveDateTime, pub processing_started_at: Option, pub time_taken: Option, - pub is_blob_cleaned: Option, pub protocol_version: Option, pub picked_by: Option, - pub eip_4844_blobs: Option, } #[derive(Debug, Clone)] @@ -294,7 +288,6 @@ pub struct LeafWitnessGeneratorJobInfo { pub updated_at: NaiveDateTime, pub processing_started_at: Option, pub time_taken: Option, - pub is_blob_cleaned: Option, pub number_of_basic_circuits: Option, pub protocol_version: Option, pub picked_by: Option, diff --git a/prover/crates/bin/prover_fri_gateway/src/proof_gen_data_fetcher.rs b/prover/crates/bin/prover_fri_gateway/src/proof_gen_data_fetcher.rs index e1add827e890..809df8ae8225 100644 --- a/prover/crates/bin/prover_fri_gateway/src/proof_gen_data_fetcher.rs +++ b/prover/crates/bin/prover_fri_gateway/src/proof_gen_data_fetcher.rs @@ -32,10 +32,6 @@ impl ProofGenDataFetcher { impl ProofGenDataFetcher { async fn save_proof_gen_data(&self, data: ProofGenerationData) { let store = &*self.0.blob_store; - let merkle_paths = store - .put(data.l1_batch_number, &data.witness_input_data.merkle_paths) - .await - .expect("Failed to save proof generation data to GCS"); let witness_inputs = store .put(data.l1_batch_number, &data.witness_input_data) .await @@ -49,13 +45,7 @@ impl 
ProofGenDataFetcher { connection .fri_witness_generator_dal() - .save_witness_inputs( - data.l1_batch_number, - &merkle_paths, - &witness_inputs, - data.protocol_version, - data.witness_input_data.eip_4844_blobs, - ) + .save_witness_inputs(data.l1_batch_number, &witness_inputs, data.protocol_version) .await; } } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json b/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json index e24d2c979a35..b5f056e1ecd9 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json @@ -60,26 +60,21 @@ }, { "ordinal": 11, - "name": "is_blob_cleaned", - "type_info": "Bool" - }, - { - "ordinal": 12, "name": "number_of_basic_circuits", "type_info": "Int4" }, { - "ordinal": 13, + "ordinal": 12, "name": "protocol_version", "type_info": "Int4" }, { - "ordinal": 14, + "ordinal": 13, "name": "picked_by", "type_info": "Text" }, { - "ordinal": 15, + "ordinal": 14, "name": "protocol_version_patch", "type_info": "Int4" } @@ -104,7 +99,6 @@ true, true, true, - true, false ] }, diff --git a/prover/crates/lib/prover_dal/.sqlx/query-929419ad8dcc70e8ce986f17075cd21d4645563f93afd4428734196c2b212276.json b/prover/crates/lib/prover_dal/.sqlx/query-929419ad8dcc70e8ce986f17075cd21d4645563f93afd4428734196c2b212276.json new file mode 100644 index 000000000000..cf5fe8117b14 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-929419ad8dcc70e8ce986f17075cd21d4645563f93afd4428734196c2b212276.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n witness_inputs_fri (\n l1_batch_number,\n witness_inputs_blob_url,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, 
'queued', NOW(), NOW(), $4)\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "929419ad8dcc70e8ce986f17075cd21d4645563f93afd4428734196c2b212276" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json b/prover/crates/lib/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json deleted file mode 100644 index 1af0943a3dd8..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n witness_inputs_fri (\n l1_batch_number,\n merkle_tree_paths_blob_url,\n witness_inputs_blob_url,\n protocol_version,\n eip_4844_blobs,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, 'queued', NOW(), NOW(), $6)\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text", - "Text", - "Int4", - "Bytea", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "adaa3126792aac4e3afb805068f01ab8ae3f32526d9b5eadcfe52d139f7d6e66" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json b/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json index 007525bceaef..25a49e191f6e 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json @@ -70,36 +70,31 @@ }, { "ordinal": 13, - "name": "is_blob_cleaned", - "type_info": "Bool" - }, - { - "ordinal": 14, "name": "depth", "type_info": "Int4" 
}, { - "ordinal": 15, + "ordinal": 14, "name": "is_node_final_proof", "type_info": "Bool" }, { - "ordinal": 16, + "ordinal": 15, "name": "proof_blob_url", "type_info": "Text" }, { - "ordinal": 17, + "ordinal": 16, "name": "protocol_version", "type_info": "Int4" }, { - "ordinal": 18, + "ordinal": 17, "name": "picked_by", "type_info": "Text" }, { - "ordinal": 19, + "ordinal": 18, "name": "protocol_version_patch", "type_info": "Int4" } @@ -124,7 +119,6 @@ false, false, true, - true, false, false, true, diff --git a/prover/crates/lib/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json b/prover/crates/lib/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json index a90da33a3333..2c94853eacff 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-d272c91f1209c277189a31c59ee191a43dc8eafc33ee067bd41e20f25f7625f0.json @@ -60,26 +60,21 @@ }, { "ordinal": 11, - "name": "is_blob_cleaned", - "type_info": "Bool" - }, - { - "ordinal": 12, "name": "number_of_basic_circuits", "type_info": "Int4" }, { - "ordinal": 13, + "ordinal": 12, "name": "protocol_version", "type_info": "Int4" }, { - "ordinal": 14, + "ordinal": 13, "name": "picked_by", "type_info": "Text" }, { - "ordinal": 15, + "ordinal": 14, "name": "protocol_version_patch", "type_info": "Int4" } @@ -106,7 +101,6 @@ true, true, true, - true, false ] }, diff --git a/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json b/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json index 79f12689194f..7786dc04a2e7 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json @@ -10,71 +10,56 @@ }, { "ordinal": 1, - "name": "merkle_tree_paths_blob_url", - "type_info": "Text" - }, - { - "ordinal": 2, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 3, "name": "error", "type_info": "Text" }, { - "ordinal": 5, + "ordinal": 4, "name": "created_at", "type_info": "Timestamp" }, { - "ordinal": 6, + "ordinal": 5, "name": "updated_at", "type_info": "Timestamp" }, { - "ordinal": 7, + "ordinal": 6, "name": "processing_started_at", "type_info": "Timestamp" }, { - "ordinal": 8, + "ordinal": 7, "name": "time_taken", "type_info": "Time" }, { - "ordinal": 9, - "name": "is_blob_cleaned", - "type_info": "Bool" - }, - { - "ordinal": 10, + "ordinal": 8, "name": "protocol_version", "type_info": "Int4" }, { - "ordinal": 11, + "ordinal": 9, "name": "picked_by", "type_info": "Text" }, { - "ordinal": 12, - "name": "eip_4844_blobs", - "type_info": "Bytea" - }, - { - "ordinal": 13, + "ordinal": 10, "name": "protocol_version_patch", "type_info": "Int4" }, { - "ordinal": 14, + "ordinal": 11, "name": "witness_inputs_blob_url", "type_info": "Text" } @@ -86,7 +71,6 @@ }, "nullable": [ false, - true, false, false, true, @@ -96,8 +80,6 @@ true, true, true, - true, - true, false, true ] diff --git a/prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.down.sql b/prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.down.sql new file mode 100644 index 000000000000..aa57b5f643d8 --- /dev/null +++ b/prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE witness_inputs_fri ADD COLUMN IF NOT EXISTS merkle_tree_paths_blob_url TEXT; +ALTER TABLE witness_inputs_fri ADD COLUMN IF NOT EXISTS eip_4844_blobs TEXT; +ALTER TABLE 
witness_inputs_fri ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN; +ALTER TABLE leaf_aggregation_witness_jobs_fri ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN; +ALTER TABLE prover_jobs_fri ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN; +ALTER TABLE prover_jobs_fri_archive ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN; diff --git a/prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.up.sql b/prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.up.sql new file mode 100644 index 000000000000..62b32871167f --- /dev/null +++ b/prover/crates/lib/prover_dal/migrations/20240722102219_remove_unused_columns_after_bwip.up.sql @@ -0,0 +1,6 @@ +ALTER TABLE witness_inputs_fri DROP COLUMN IF EXISTS merkle_tree_paths_blob_url; +ALTER TABLE witness_inputs_fri DROP COLUMN IF EXISTS eip_4844_blobs; +ALTER TABLE witness_inputs_fri DROP COLUMN IF EXISTS is_blob_cleaned; +ALTER TABLE leaf_aggregation_witness_jobs_fri DROP COLUMN IF EXISTS is_blob_cleaned; +ALTER TABLE prover_jobs_fri DROP COLUMN IF EXISTS is_blob_cleaned; +ALTER TABLE prover_jobs_fri_archive DROP COLUMN IF EXISTS is_blob_cleaned; diff --git a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs index 419cb635ac53..f6efc6afa6ad 100644 --- a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs @@ -669,7 +669,6 @@ impl FriProverDal<'_, '_> { created_at: row.created_at, updated_at: row.updated_at, time_taken: row.time_taken, - is_blob_cleaned: row.is_blob_cleaned, depth: row.depth as u32, is_node_final_proof: row.is_node_final_proof, proof_blob_url: row.proof_blob_url.clone(), diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs index d56d18550e50..bc9cde72fde2 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs +++ 
b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs @@ -4,7 +4,7 @@ use std::{collections::HashMap, str::FromStr, time::Duration}; use sqlx::{types::chrono::NaiveDateTime, Row}; use zksync_basic_types::{ - basic_fri_types::{AggregationRound, Eip4844Blobs}, + basic_fri_types::AggregationRound, protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, @@ -43,35 +43,28 @@ impl FriWitnessGeneratorDal<'_, '_> { pub async fn save_witness_inputs( &mut self, block_number: L1BatchNumber, - merkle_paths_blob_url: &str, witness_inputs_blob_url: &str, protocol_version: ProtocolSemanticVersion, - eip_4844_blobs: Eip4844Blobs, ) { - let blobs_raw = eip_4844_blobs.encode(); sqlx::query!( r#" INSERT INTO witness_inputs_fri ( l1_batch_number, - merkle_tree_paths_blob_url, witness_inputs_blob_url, protocol_version, - eip_4844_blobs, status, created_at, updated_at, protocol_version_patch ) VALUES - ($1, $2, $3, $4, $5, 'queued', NOW(), NOW(), $6) + ($1, $2, $3, 'queued', NOW(), NOW(), $4) ON CONFLICT (l1_batch_number) DO NOTHING "#, i64::from(block_number.0), - merkle_paths_blob_url, witness_inputs_blob_url, protocol_version.minor as i32, - blobs_raw, protocol_version.patch.0 as i32, ) .fetch_optional(self.storage.conn()) @@ -1464,7 +1457,6 @@ impl FriWitnessGeneratorDal<'_, '_> { .unwrap() .map(|row| BasicWitnessGeneratorJobInfo { l1_batch_number, - merkle_tree_paths_blob_url: row.merkle_tree_paths_blob_url, witness_inputs_blob_url: row.witness_inputs_blob_url, attempts: row.attempts as u32, status: row.status.parse::().unwrap(), @@ -1473,15 +1465,8 @@ impl FriWitnessGeneratorDal<'_, '_> { updated_at: row.updated_at, processing_started_at: row.processing_started_at, time_taken: row.time_taken, - is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, picked_by: row.picked_by, - eip_4844_blobs: row - .eip_4844_blobs - .as_deref() - 
.map(Eip4844Blobs::decode) - .transpose() - .unwrap(), }) } @@ -1516,7 +1501,6 @@ impl FriWitnessGeneratorDal<'_, '_> { updated_at: row.updated_at, processing_started_at: row.processing_started_at, time_taken: row.time_taken, - is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, picked_by: row.picked_by.clone(), number_of_basic_circuits: row.number_of_basic_circuits, From 3fbbee10be99e8c5a696bfd50d81230141bccbf4 Mon Sep 17 00:00:00 2001 From: Manuel Mauro Date: Tue, 23 Jul 2024 14:05:15 +0300 Subject: [PATCH 13/52] feat: add revert tests (external node) to zk_toolbox (#2408) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Adds revert tests (external node) to zk_toolbox ## Why ❔ ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. 
--------- Co-authored-by: aon <21188659+aon@users.noreply.github.com> Co-authored-by: Manuel --- .github/workflows/ci-zk-toolbox-reusable.yml | 4 + .../tests/revert-and-restart-en.test.ts | 242 +++++++++++++----- core/tests/revert-test/tests/utils.ts | 81 ++++++ etc/utils/src/file-configs.ts | 18 +- .../src/commands/test/args/revert.rs | 4 +- .../zk_supervisor/src/commands/test/revert.rs | 22 +- .../crates/zk_supervisor/src/messages.rs | 11 + 7 files changed, 304 insertions(+), 78 deletions(-) create mode 100644 core/tests/revert-test/tests/utils.ts diff --git a/.github/workflows/ci-zk-toolbox-reusable.yml b/.github/workflows/ci-zk-toolbox-reusable.yml index 7ff5eb3f1cf4..87bd1729db91 100644 --- a/.github/workflows/ci-zk-toolbox-reusable.yml +++ b/.github/workflows/ci-zk-toolbox-reusable.yml @@ -118,6 +118,10 @@ jobs: run: | ci_run zk_supervisor test revert --ignore-prerequisites --verbose + - name: Run revert tests (external node) + run: | + ci_run zk_supervisor test revert --external-node --ignore-prerequisites --verbose + - name: Show server.log logs if: always() run: ci_run cat server.log || true diff --git a/core/tests/revert-test/tests/revert-and-restart-en.test.ts b/core/tests/revert-test/tests/revert-and-restart-en.test.ts index ce306134f51f..2fee9c7be887 100644 --- a/core/tests/revert-test/tests/revert-and-restart-en.test.ts +++ b/core/tests/revert-test/tests/revert-and-restart-en.test.ts @@ -5,23 +5,49 @@ // main_contract.getTotalBatchesExecuted actually checks the number of batches executed. 
import * as utils from 'utils'; import { Tester } from './tester'; +import { exec, runServerInBackground, runExternalNodeInBackground } from './utils'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import { expect, assert } from 'chai'; import fs from 'fs'; import * as child_process from 'child_process'; import * as dotenv from 'dotenv'; +import { + getAllConfigsPath, + loadConfig, + shouldLoadConfigFromFile, + replaceAggregatedBlockExecuteDeadline +} from 'utils/build/file-configs'; +import path from 'path'; + +const pathToHome = path.join(__dirname, '../../../..'); +const fileConfig = shouldLoadConfigFromFile(); let mainEnv: string; let extEnv: string; -if (process.env.DEPLOYMENT_MODE == 'Validium') { + +let deploymentMode: string; + +if (fileConfig.loadFromFile) { + const genesisConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'genesis.yaml' }); + deploymentMode = genesisConfig.deploymentMode; +} else { + if (!process.env.DEPLOYMENT_MODE) { + throw new Error('DEPLOYMENT_MODE is not set'); + } + if (!['Validium', 'Rollup'].includes(process.env.DEPLOYMENT_MODE)) { + throw new Error(`Unknown deployment mode: ${process.env.DEPLOYMENT_MODE}`); + } + deploymentMode = process.env.DEPLOYMENT_MODE; +} + +if (deploymentMode == 'Validium') { mainEnv = process.env.IN_DOCKER ? 'dev_validium_docker' : 'dev_validium'; extEnv = process.env.IN_DOCKER ? 'ext-node-validium-docker' : 'ext-node-validium'; -} else if (process.env.DEPLOYMENT_MODE == 'Rollup') { +} else { + // Rollup deployment mode mainEnv = process.env.IN_DOCKER ? 'docker' : 'dev'; extEnv = process.env.IN_DOCKER ? 
'ext-node-docker' : 'ext-node'; -} else { - throw new Error(`Unknown deployment mode: ${process.env.DEPLOYMENT_MODE}`); } const mainLogsPath: string = 'revert_main.log'; const extLogsPath: string = 'revert_ext.log'; @@ -46,10 +72,6 @@ function parseSuggestedValues(jsonString: string): SuggestedValues { }; } -function spawn(cmd: string, args: string[], options: child_process.SpawnOptions): child_process.ChildProcess { - return child_process.spawn(cmd, args, options); -} - function run(cmd: string, args: string[], options: child_process.SpawnOptions): child_process.SpawnSyncReturns { let res = child_process.spawnSync(cmd, args, options); expect(res.error).to.be.undefined; @@ -79,18 +101,33 @@ function fetchEnv(zksyncEnv: string): any { return { ...process.env, ...dotenv.parse(res.stdout) }; } -function runBlockReverter(args: string[]): string { +async function runBlockReverter(args: string[]): Promise { let env = fetchEnv(mainEnv); - env.RUST_LOG = 'off'; - let res = run('./target/release/block_reverter', args, { + + let fileConfigFlags = ''; + if (fileConfig.loadFromFile) { + const configPaths = getAllConfigsPath({ pathToHome, chain: fileConfig.chain }); + fileConfigFlags = ` + --config-path=${configPaths['general.yaml']} + --contracts-config-path=${configPaths['contracts.yaml']} + --secrets-path=${configPaths['secrets.yaml']} + --wallets-path=${configPaths['wallets.yaml']} + --genesis-path=${configPaths['genesis.yaml']} + `; + } + + const cmd = `cd ${pathToHome} && RUST_LOG=off cargo run --bin block_reverter --release -- ${args.join( + ' ' + )} ${fileConfigFlags}`; + const executedProcess = await exec(cmd, { cwd: env.ZKSYNC_HOME, env: { ...env, PATH: process.env.PATH } }); - console.log(res.stderr.toString()); - return res.stdout.toString(); + + return executedProcess.stdout; } async function killServerAndWaitForShutdown(tester: Tester, server: string) { @@ -112,7 +149,7 @@ async function killServerAndWaitForShutdown(tester: Tester, server: string) { } class 
MainNode { - constructor(public tester: Tester, private proc: child_process.ChildProcess) {} + constructor(public tester: Tester) {} // Terminates all main node processes running. public static async terminateAll() { @@ -129,33 +166,35 @@ class MainNode { public static async spawn( logs: fs.WriteStream, enableConsensus: boolean, - enableExecute: boolean + enableExecute: boolean, + ethClientWeb3Url: string, + apiWeb3JsonRpcHttpUrl: string, + baseTokenAddress: string ): Promise { let env = fetchEnv(mainEnv); env.ETH_SENDER_SENDER_AGGREGATED_BLOCK_EXECUTE_DEADLINE = enableExecute ? '1' : '10000'; // Set full mode for the Merkle tree as it is required to get blocks committed. env.DATABASE_MERKLE_TREE_MODE = 'full'; - console.log(`DATABASE_URL = ${env.DATABASE_URL}`); + + if (fileConfig.loadFromFile) { + replaceAggregatedBlockExecuteDeadline(pathToHome, fileConfig, enableExecute ? 1 : 10000); + } let components = 'api,tree,eth,state_keeper,commitment_generator,da_dispatcher'; if (enableConsensus) { components += ',consensus'; } - let proc = spawn('./target/release/zksync_server', ['--components', components], { - cwd: env.ZKSYNC_HOME, + let proc = runServerInBackground({ + components: [components], stdio: [null, logs, logs], - env: { - ...env, - PATH: process.env.PATH - } + cwd: pathToHome, + env: env, + useZkInception: fileConfig.loadFromFile }); + // Wait until the main node starts responding. - let tester: Tester = await Tester.init( - env.ETH_CLIENT_WEB3_URL, - env.API_WEB3_JSON_RPC_HTTP_URL, - env.CONTRACTS_BASE_TOKEN_ADDR - ); + let tester: Tester = await Tester.init(ethClientWeb3Url, apiWeb3JsonRpcHttpUrl, baseTokenAddress); while (true) { try { await tester.syncWallet.provider.getBlockNumber(); @@ -168,7 +207,7 @@ class MainNode { await utils.sleep(1); } } - return new MainNode(tester, proc); + return new MainNode(tester); } } @@ -186,27 +225,29 @@ class ExtNode { // Spawns an external node. 
// If enableConsensus is set, the node will use consensus P2P network to fetch blocks. - public static async spawn(logs: fs.WriteStream, enableConsensus: boolean): Promise { + public static async spawn( + logs: fs.WriteStream, + enableConsensus: boolean, + ethClientWeb3Url: string, + enEthClientUrl: string, + baseTokenAddress: string + ): Promise { let env = fetchEnv(extEnv); - console.log(`DATABASE_URL = ${env.DATABASE_URL}`); let args = []; if (enableConsensus) { args.push('--enable-consensus'); } - let proc = spawn('./target/release/zksync_external_node', args, { - cwd: env.ZKSYNC_HOME, + + // Run server in background. + let proc = runExternalNodeInBackground({ stdio: [null, logs, logs], - env: { - ...env, - PATH: process.env.PATH - } + cwd: pathToHome, + env: env, + useZkInception: fileConfig.loadFromFile }); + // Wait until the node starts responding. - let tester: Tester = await Tester.init( - env.EN_ETH_CLIENT_URL, - `http://127.0.0.1:${env.EN_HTTP_PORT}`, - env.CONTRACTS_BASE_TOKEN_ADDR - ); + let tester: Tester = await Tester.init(ethClientWeb3Url, enEthClientUrl, baseTokenAddress); while (true) { try { await tester.syncWallet.provider.getBlockNumber(); @@ -232,15 +273,53 @@ class ExtNode { } describe('Block reverting test', function () { - if (process.env.SKIP_COMPILATION !== 'true') { - compileBinaries(); - } - console.log(`PWD = ${process.env.PWD}`); - const mainLogs: fs.WriteStream = fs.createWriteStream(mainLogsPath, { flags: 'a' }); - const extLogs: fs.WriteStream = fs.createWriteStream(extLogsPath, { flags: 'a' }); - const enableConsensus = process.env.ENABLE_CONSENSUS === 'true'; - console.log(`enableConsensus = ${enableConsensus}`); - const depositAmount = ethers.parseEther('0.001'); + let ethClientWeb3Url: string; + let apiWeb3JsonRpcHttpUrl: string; + let baseTokenAddress: string; + let enEthClientUrl: string; + let operatorAddress: string; + let mainLogs: fs.WriteStream; + let extLogs: fs.WriteStream; + let depositAmount: bigint; + let 
enableConsensus: boolean; + + before('initialize test', async () => { + if (fileConfig.loadFromFile) { + const secretsConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'secrets.yaml' }); + const generalConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'general.yaml' }); + const contractsConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'contracts.yaml' }); + const externalNodeConfig = loadConfig({ + pathToHome, + chain: fileConfig.chain, + config: 'external_node.yaml' + }); + const walletsConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'wallets.yaml' }); + + ethClientWeb3Url = secretsConfig.l1.l1_rpc_url; + apiWeb3JsonRpcHttpUrl = generalConfig.api.web3_json_rpc.http_url; + baseTokenAddress = contractsConfig.l1.base_token_addr; + enEthClientUrl = externalNodeConfig.main_node_url; + operatorAddress = walletsConfig.operator.address; + } else { + let env = fetchEnv(mainEnv); + ethClientWeb3Url = env.ETH_CLIENT_WEB3_URL; + apiWeb3JsonRpcHttpUrl = env.API_WEB3_JSON_RPC_HTTP_URL; + baseTokenAddress = env.CONTRACTS_BASE_TOKEN_ADDR; + enEthClientUrl = `http://127.0.0.1:${env.EN_HTTP_PORT}`; + // TODO use env variable for this? 
+ operatorAddress = '0xde03a0B5963f75f1C8485B355fF6D30f3093BDE7'; + } + + if (process.env.SKIP_COMPILATION !== 'true' && !fileConfig.loadFromFile) { + compileBinaries(); + } + console.log(`PWD = ${process.env.PWD}`); + mainLogs = fs.createWriteStream(mainLogsPath, { flags: 'a' }); + extLogs = fs.createWriteStream(extLogsPath, { flags: 'a' }); + enableConsensus = process.env.ENABLE_CONSENSUS === 'true'; + console.log(`enableConsensus = ${enableConsensus}`); + depositAmount = ethers.parseEther('0.001'); + }); step('run', async () => { console.log('Make sure that nodes are not running'); @@ -248,23 +327,30 @@ describe('Block reverting test', function () { await MainNode.terminateAll(); console.log('Start main node'); - let mainNode = await MainNode.spawn(mainLogs, enableConsensus, true); + let mainNode = await MainNode.spawn( + mainLogs, + enableConsensus, + true, + ethClientWeb3Url, + apiWeb3JsonRpcHttpUrl, + baseTokenAddress + ); console.log('Start ext node'); - let extNode = await ExtNode.spawn(extLogs, enableConsensus); + let extNode = await ExtNode.spawn(extLogs, enableConsensus, ethClientWeb3Url, enEthClientUrl, baseTokenAddress); await mainNode.tester.fundSyncWallet(); await extNode.tester.fundSyncWallet(); const main_contract = await mainNode.tester.syncWallet.getMainContract(); - const baseTokenAddress = await mainNode.tester.syncWallet.getBaseToken(); - const isETHBasedChain = baseTokenAddress === zksync.utils.ETH_ADDRESS_IN_CONTRACTS; + const baseToken = await mainNode.tester.syncWallet.getBaseToken(); + const isETHBasedChain = baseToken === zksync.utils.ETH_ADDRESS_IN_CONTRACTS; const alice: zksync.Wallet = extNode.tester.emptyWallet(); console.log( 'Finalize an L1 transaction to ensure at least 1 executed L1 batch and that all transactions are processed' ); const h: zksync.types.PriorityOpResponse = await extNode.tester.syncWallet.deposit({ - token: isETHBasedChain ? zksync.utils.LEGACY_ETH_ADDRESS : baseTokenAddress, + token: isETHBasedChain ? 
zksync.utils.LEGACY_ETH_ADDRESS : baseToken, amount: depositAmount, to: alice.address, approveBaseERC20: true, @@ -274,7 +360,14 @@ describe('Block reverting test', function () { console.log('Restart the main node with L1 batch execution disabled.'); await killServerAndWaitForShutdown(mainNode.tester, 'zksync_server'); - mainNode = await MainNode.spawn(mainLogs, enableConsensus, false); + mainNode = await MainNode.spawn( + mainLogs, + enableConsensus, + false, + ethClientWeb3Url, + apiWeb3JsonRpcHttpUrl, + baseTokenAddress + ); console.log('Commit at least 2 L1 batches which are not executed'); const lastExecuted = await main_contract.getTotalBatchesExecuted(); @@ -282,7 +375,7 @@ describe('Block reverting test', function () { // it gets updated with some batch logs only at the start of the next batch. const initialL1BatchNumber = await main_contract.getTotalBatchesCommitted(); const firstDepositHandle = await extNode.tester.syncWallet.deposit({ - token: isETHBasedChain ? zksync.utils.LEGACY_ETH_ADDRESS : baseTokenAddress, + token: isETHBasedChain ? zksync.utils.LEGACY_ETH_ADDRESS : baseToken, amount: depositAmount, to: alice.address, approveBaseERC20: true, @@ -295,7 +388,7 @@ describe('Block reverting test', function () { } const secondDepositHandle = await extNode.tester.syncWallet.deposit({ - token: isETHBasedChain ? zksync.utils.LEGACY_ETH_ADDRESS : baseTokenAddress, + token: isETHBasedChain ? 
zksync.utils.LEGACY_ETH_ADDRESS : baseToken, amount: depositAmount, to: alice.address, approveBaseERC20: true, @@ -306,31 +399,31 @@ describe('Block reverting test', function () { await utils.sleep(0.3); } + const alice2 = await alice.getBalance(); while (true) { const lastCommitted = await main_contract.getTotalBatchesCommitted(); console.log(`lastExecuted = ${lastExecuted}, lastCommitted = ${lastCommitted}`); if (lastCommitted - lastExecuted >= 2n) { + console.log('Terminate the main node'); + await killServerAndWaitForShutdown(mainNode.tester, 'zksync_server'); break; } await utils.sleep(0.3); } - const alice2 = await alice.getBalance(); - console.log('Terminate the main node'); - await killServerAndWaitForShutdown(mainNode.tester, 'zksync_server'); console.log('Ask block_reverter to suggest to which L1 batch we should revert'); - const values_json = runBlockReverter([ + const values_json = await runBlockReverter([ 'print-suggested-values', '--json', '--operator-address', - '0xde03a0B5963f75f1C8485B355fF6D30f3093BDE7' + operatorAddress ]); console.log(`values = ${values_json}`); const values = parseSuggestedValues(values_json); assert(lastExecuted === values.lastExecutedL1BatchNumber); console.log('Send reverting transaction to L1'); - runBlockReverter([ + await runBlockReverter([ 'send-eth-transaction', '--l1-batch-number', values.lastExecutedL1BatchNumber.toString(), @@ -346,7 +439,7 @@ describe('Block reverting test', function () { assert(lastCommitted2 === lastExecuted); console.log('Rollback db'); - runBlockReverter([ + await runBlockReverter([ 'rollback-db', '--l1-batch-number', values.lastExecutedL1BatchNumber.toString(), @@ -356,17 +449,24 @@ describe('Block reverting test', function () { ]); console.log('Start main node.'); - mainNode = await MainNode.spawn(mainLogs, enableConsensus, true); + mainNode = await MainNode.spawn( + mainLogs, + enableConsensus, + true, + ethClientWeb3Url, + apiWeb3JsonRpcHttpUrl, + baseTokenAddress + ); console.log('Wait for 
the external node to detect reorg and terminate'); await extNode.waitForExit(); console.log('Restart external node and wait for it to revert.'); - extNode = await ExtNode.spawn(extLogs, enableConsensus); + extNode = await ExtNode.spawn(extLogs, enableConsensus, ethClientWeb3Url, enEthClientUrl, baseTokenAddress); console.log('Execute an L1 transaction'); const depositHandle = await extNode.tester.syncWallet.deposit({ - token: isETHBasedChain ? zksync.utils.LEGACY_ETH_ADDRESS : baseTokenAddress, + token: isETHBasedChain ? zksync.utils.LEGACY_ETH_ADDRESS : baseToken, amount: depositAmount, to: alice.address, approveBaseERC20: true, @@ -407,9 +507,13 @@ describe('Block reverting test', function () { await checkedRandomTransfer(alice, 1n); }); - after('Terminate nodes', async () => { + after('terminate nodes', async () => { await MainNode.terminateAll(); await ExtNode.terminateAll(); + + if (fileConfig.loadFromFile) { + replaceAggregatedBlockExecuteDeadline(pathToHome, fileConfig, 10); + } }); }); diff --git a/core/tests/revert-test/tests/utils.ts b/core/tests/revert-test/tests/utils.ts new file mode 100644 index 000000000000..4bf38387cccf --- /dev/null +++ b/core/tests/revert-test/tests/utils.ts @@ -0,0 +1,81 @@ +import { exec as _exec, spawn as _spawn, ChildProcessWithoutNullStreams, type ProcessEnvOptions } from 'child_process'; +import { promisify } from 'util'; + +// executes a command in background and returns a child process handle +// by default pipes data to parent's stdio but this can be overridden +export function background({ + command, + stdio = 'inherit', + cwd, + env +}: { + command: string; + stdio: any; + cwd?: ProcessEnvOptions['cwd']; + env?: ProcessEnvOptions['env']; +}): ChildProcessWithoutNullStreams { + command = command.replace(/\n/g, ' '); + return _spawn(command, { stdio: stdio, shell: true, detached: true, cwd, env }); +} + +export function runInBackground({ + command, + components, + stdio, + cwd, + env +}: { + command: string; + 
components?: string[]; + stdio: any; + cwd?: Parameters[0]['cwd']; + env?: Parameters[0]['env']; +}): ChildProcessWithoutNullStreams { + if (components && components.length > 0) { + command += ` --components=${components.join(',')}`; + } + return background({ command, stdio, cwd, env }); +} + +export function runServerInBackground({ + components, + stdio, + cwd, + env, + useZkInception +}: { + components?: string[]; + stdio: any; + cwd?: Parameters[0]['cwd']; + env?: Parameters[0]['env']; + useZkInception?: boolean; +}): ChildProcessWithoutNullStreams { + let command = useZkInception ? 'zk_inception server' : 'zk server'; + return runInBackground({ command, components, stdio, cwd, env }); +} + +export function runExternalNodeInBackground({ + components, + stdio, + cwd, + env, + useZkInception +}: { + components?: string[]; + stdio: any; + cwd?: Parameters[0]['cwd']; + env?: Parameters[0]['env']; + useZkInception?: boolean; +}): ChildProcessWithoutNullStreams { + let command = useZkInception ? 
'zk_inception external-node run' : 'zk external-node'; + return runInBackground({ command, components, stdio, cwd, env }); +} + +// async executor of shell commands +// spawns a new shell and can execute arbitrary commands, like "ls -la | grep .env" +// returns { stdout, stderr } +const promisified = promisify(_exec); +export function exec(command: string, options: ProcessEnvOptions) { + command = command.replace(/\n/g, ' '); + return promisified(command, options); +} diff --git a/etc/utils/src/file-configs.ts b/etc/utils/src/file-configs.ts index 16b89f8f3c97..1675745bca5d 100644 --- a/etc/utils/src/file-configs.ts +++ b/etc/utils/src/file-configs.ts @@ -16,7 +16,14 @@ export function shouldLoadConfigFromFile() { } } -export const configNames = ['contracts.yaml', 'general.yaml', 'genesis.yaml', 'secrets.yaml', 'wallets.yaml'] as const; +export const configNames = [ + 'contracts.yaml', + 'general.yaml', + 'genesis.yaml', + 'secrets.yaml', + 'wallets.yaml', + 'external_node.yaml' +] as const; export type ConfigName = (typeof configNames)[number]; @@ -114,3 +121,12 @@ export function getConfigsFolderPath({ }) { return path.join(pathToHome, 'chains', chain, configsFolder ?? 'configs', configsFolderSuffix ?? 
''); } + +export function replaceAggregatedBlockExecuteDeadline(pathToHome: string, fileConfig: any, value: number) { + const generalConfigPath = getConfigPath({ pathToHome, chain: fileConfig.chain, config: 'general.yaml' }); + const generalConfig = fs.readFileSync(generalConfigPath, 'utf8'); + const regex = /aggregated_block_execute_deadline:\s*\d+/g; + const newGeneralConfig = generalConfig.replace(regex, `aggregated_block_execute_deadline: ${value}`); + + fs.writeFileSync(generalConfigPath, newGeneralConfig, 'utf8'); +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/args/revert.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/args/revert.rs index dc78282fd0d6..e4305b6796c2 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/args/revert.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/args/revert.rs @@ -1,9 +1,11 @@ use clap::Parser; -use crate::messages::MSG_REVERT_TEST_ENABLE_CONSENSUS_HELP; +use crate::messages::{MSG_REVERT_TEST_ENABLE_CONSENSUS_HELP, MSG_TESTS_EXTERNAL_NODE_HELP}; #[derive(Debug, Parser)] pub struct RevertArgs { #[clap(long, help = MSG_REVERT_TEST_ENABLE_CONSENSUS_HELP)] pub enable_consensus: bool, + #[clap(short, long, help = MSG_TESTS_EXTERNAL_NODE_HELP)] + pub external_node: bool, } diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/revert.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/revert.rs index 71de1a2027a3..eead83303eed 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/revert.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/revert.rs @@ -1,9 +1,12 @@ -use common::{cmd::Cmd, logger, server::Server, spinner::Spinner}; +use common::{cmd::Cmd, logger, spinner::Spinner}; use config::EcosystemConfig; use xshell::{cmd, Shell}; use super::args::revert::RevertArgs; -use crate::messages::{MSG_REVERT_TEST_RUN_INFO, MSG_REVERT_TEST_RUN_SUCCESS}; +use crate::messages::{ + msg_revert_tests_run, MSG_REVERT_TEST_INSTALLING_DEPENDENCIES, MSG_REVERT_TEST_RUN_INFO, 
+ MSG_REVERT_TEST_RUN_SUCCESS, +}; const REVERT_TESTS_PATH: &str = "core/tests/revert-test"; @@ -12,7 +15,6 @@ pub fn run(shell: &Shell, args: RevertArgs) -> anyhow::Result<()> { shell.change_dir(ecosystem_config.link_to_code.join(REVERT_TESTS_PATH)); logger::info(MSG_REVERT_TEST_RUN_INFO); - Server::new(None, ecosystem_config.link_to_code.clone()).build(shell)?; install_and_build_dependencies(shell, &ecosystem_config)?; run_test(shell, &args, &ecosystem_config)?; logger::outro(MSG_REVERT_TEST_RUN_SUCCESS); @@ -25,9 +27,10 @@ fn install_and_build_dependencies( ecosystem_config: &EcosystemConfig, ) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(&ecosystem_config.link_to_code); - let spinner = Spinner::new("Installing and building dependencies..."); + let spinner = Spinner::new(MSG_REVERT_TEST_INSTALLING_DEPENDENCIES); Cmd::new(cmd!(shell, "yarn install")).run()?; Cmd::new(cmd!(shell, "yarn utils build")).run()?; + spinner.finish(); Ok(()) } @@ -37,10 +40,15 @@ fn run_test( args: &RevertArgs, ecosystem_config: &EcosystemConfig, ) -> anyhow::Result<()> { - Spinner::new("Running test...").freeze(); + Spinner::new(&msg_revert_tests_run(args.external_node)).freeze(); + + let cmd = if args.external_node { + cmd!(shell, "yarn mocha tests/revert-and-restart-en.test.ts") + } else { + cmd!(shell, "yarn mocha tests/revert-and-restart.test.ts") + }; - let mut cmd = Cmd::new(cmd!(shell, "yarn mocha tests/revert-and-restart.test.ts")) - .env("CHAIN_NAME", &ecosystem_config.default_chain); + let mut cmd = Cmd::new(cmd).env("CHAIN_NAME", &ecosystem_config.default_chain); if args.enable_consensus { cmd = cmd.env("ENABLE_CONSENSUS", "true"); } diff --git a/zk_toolbox/crates/zk_supervisor/src/messages.rs b/zk_toolbox/crates/zk_supervisor/src/messages.rs index 3275523ed96e..863f1c4b1aef 100644 --- a/zk_toolbox/crates/zk_supervisor/src/messages.rs +++ b/zk_toolbox/crates/zk_supervisor/src/messages.rs @@ -93,7 +93,18 @@ pub(super) const MSG_INTEGRATION_TESTS_BUILDING_CONTRACTS: 
&str = "Building test // Revert tests related messages pub(super) const MSG_REVERT_TEST_ENABLE_CONSENSUS_HELP: &str = "Enable consensus"; +pub(super) const MSG_REVERT_TEST_INSTALLING_DEPENDENCIES: &str = + "Building and installing dependencies. This process may take a lot of time..."; pub(super) const MSG_REVERT_TEST_RUN_INFO: &str = "Running revert and restart test"; +pub(super) fn msg_revert_tests_run(external_node: bool) -> String { + let base = "Running integration tests"; + if external_node { + format!("{} for external node", base) + } else { + format!("{} for main server", base) + } +} + pub(super) const MSG_REVERT_TEST_RUN_SUCCESS: &str = "Revert and restart test ran successfully"; // Cleaning related messages From 6ce0b50b04a89e71e63355e080cc4c6b0c692234 Mon Sep 17 00:00:00 2001 From: Yury Akudovich Date: Tue, 23 Jul 2024 15:36:46 +0200 Subject: [PATCH 14/52] ci: Fix prover-fri-gpu-gar build (#2465) --- .github/workflows/build-prover-fri-gpu-gar.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-prover-fri-gpu-gar.yml b/.github/workflows/build-prover-fri-gpu-gar.yml index 7805f7ba565b..9740cafd9678 100644 --- a/.github/workflows/build-prover-fri-gpu-gar.yml +++ b/.github/workflows/build-prover-fri-gpu-gar.yml @@ -44,7 +44,7 @@ jobs: with: context: docker/prover-gpu-fri-gar build-args: | - PROVER_IMAGE=${{ inputs.image_tag_suffix }} + PROVER_IMAGE=${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} push: true tags: | us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} From d8851c8af2cd4b595f4edb9c36c81e2310835a77 Mon Sep 17 00:00:00 2001 From: EmilLuta Date: Tue, 23 Jul 2024 19:06:38 +0200 Subject: [PATCH 15/52] fix(prover): BWG optimizations (#2469) This PR adds BWG optimizations on crypto side. See more [here](https://github.com/matter-labs/era-zkevm_test_harness/pull/165). 
--- Cargo.lock | 20 ++-- Cargo.toml | 4 +- prover/Cargo.lock | 95 ++++++------------- prover/Cargo.toml | 8 +- .../witness_generator/src/basic_circuits.rs | 4 +- 5 files changed, 48 insertions(+), 83 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f3605beb7918..7892e3a2e909 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1099,9 +1099,9 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21ac98cee014780619ca5fe43984e605b17bcad9308b15cebd2fec549a2d8c92" +checksum = "d4b69893ec5a2112430adaf8e29b52ea9ec4ef2d6663879f7cc279b4479a8880" dependencies = [ "derivative", "serde", @@ -1166,12 +1166,12 @@ dependencies = [ [[package]] name = "circuit_sequencer_api" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bf447d83547c14e728239e7e3287e2f47b4891675315c7c69d9ee3ce56b0a8" +checksum = "121470724079938b8f878e8a95f757d814624795c9a5ca69dd9dd782035fbe39" dependencies = [ "bellman_ce", - "circuit_encodings 0.150.1", + "circuit_encodings 0.150.2-rc.1", "derivative", "rayon", "serde", @@ -8081,7 +8081,7 @@ dependencies = [ "anyhow", "circuit_sequencer_api 0.140.0", "circuit_sequencer_api 0.141.0", - "circuit_sequencer_api 0.150.1", + "circuit_sequencer_api 0.150.2-rc.1", "futures 0.3.28", "itertools 0.10.5", "num_cpus", @@ -8729,9 +8729,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5af1838466ae06e56064fafa8b4563c3bde44b44839de0b6197c293e03d133fc" +checksum = "4672556b6bc06da9dcd38a607e139b8eb3083edfaabcd12981e8a62051ee1f81" dependencies = [ "boojum", "derivative", @@ -8853,7 +8853,7 @@ dependencies = [ "circuit_sequencer_api 0.140.0", "circuit_sequencer_api 0.141.0", "circuit_sequencer_api 0.142.0", - "circuit_sequencer_api 0.150.1", + 
"circuit_sequencer_api 0.150.2-rc.1", "ethabi", "hex", "itertools 0.10.5", @@ -9292,7 +9292,7 @@ version = "0.1.0" dependencies = [ "bincode", "chrono", - "circuit_sequencer_api 0.150.1", + "circuit_sequencer_api 0.150.2-rc.1", "serde", "serde_json", "serde_with", diff --git a/Cargo.toml b/Cargo.toml index 0ce4be5c8431..b0f98f33e3df 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -198,9 +198,9 @@ circuit_sequencer_api_1_3_3 = { package = "circuit_sequencer_api", version = "=0 circuit_sequencer_api_1_4_0 = { package = "circuit_sequencer_api", version = "=0.140.0" } circuit_sequencer_api_1_4_1 = { package = "circuit_sequencer_api", version = "=0.141.0" } circuit_sequencer_api_1_4_2 = { package = "circuit_sequencer_api", version = "=0.142.0" } -circuit_sequencer_api_1_5_0 = { package = "circuit_sequencer_api", version = "=0.150.1" } +circuit_sequencer_api_1_5_0 = { package = "circuit_sequencer_api", version = "=0.150.2-rc.1" } crypto_codegen = { package = "zksync_solidity_vk_codegen", version = "=0.1.0" } -kzg = { package = "zksync_kzg", version = "=0.150.1" } +kzg = { package = "zksync_kzg", version = "=0.150.2-rc.1" } zk_evm = { version = "=0.133.0" } zk_evm_1_3_1 = { package = "zk_evm", version = "0.131.0-rc.2" } zk_evm_1_3_3 = { package = "zk_evm", version = "0.133.0" } diff --git a/prover/Cargo.lock b/prover/Cargo.lock index a7df00e50da3..376b464babe1 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -933,11 +933,11 @@ dependencies = [ [[package]] name = "circuit_definitions" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38fac8ca08a18d51568d4dd0a8fc51b9c17625020eaf808cacbcdd03be8445c3" +checksum = "45eda61fb4b476ceac2dad7aaf85ba4ed02fb834598dd7aafacebe405f2af612" dependencies = [ - "circuit_encodings 0.150.1", + "circuit_encodings 0.150.2-rc.1", "crossbeam 0.8.4", "derivative", "seq-macro", @@ -983,9 +983,9 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = 
"0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21ac98cee014780619ca5fe43984e605b17bcad9308b15cebd2fec549a2d8c92" +checksum = "d4b69893ec5a2112430adaf8e29b52ea9ec4ef2d6663879f7cc279b4479a8880" dependencies = [ "derivative", "serde", @@ -1050,12 +1050,12 @@ dependencies = [ [[package]] name = "circuit_sequencer_api" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bf447d83547c14e728239e7e3287e2f47b4891675315c7c69d9ee3ce56b0a8" +checksum = "121470724079938b8f878e8a95f757d814624795c9a5ca69dd9dd782035fbe39" dependencies = [ "bellman_ce 0.7.0", - "circuit_encodings 0.150.1", + "circuit_encodings 0.150.2-rc.1", "derivative", "rayon", "serde", @@ -1486,36 +1486,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "curl" -version = "0.4.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.72+curl-8.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29cbdc8314c447d11e8fd156dcdd031d9e02a7a976163e396b548c03153bc9ea" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "windows-sys 0.52.0", -] - [[package]] name = "curve25519-dalek" version = "4.1.3" @@ -3333,7 +3303,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" dependencies = [ "cc", - "libc", "pkg-config", "vcpkg", ] @@ -4537,7 +4506,7 @@ dependencies = [ "anyhow", "bincode", "chrono", - "circuit_definitions 0.150.1", + "circuit_definitions 0.150.2-rc.1", "clap 4.5.4", "colored", "dialoguer", @@ -4548,7 +4517,7 @@ 
dependencies = [ "tokio", "tracing", "tracing-subscriber", - "zkevm_test_harness 0.150.1", + "zkevm_test_harness 0.150.2-rc.1", "zksync_basic_types", "zksync_config", "zksync_contracts", @@ -5635,15 +5604,15 @@ checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shivini" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf225052e092432c31c6c574eb16299b6e734476c9c40ac84be44bdda52aa3c" +checksum = "b2e391df42e8e145b12d7c446acd0de300ccc964ee941f5b9013ec970811f70f" dependencies = [ "bincode", "blake2 0.10.6", "boojum", "boojum-cuda", - "circuit_definitions 0.150.1", + "circuit_definitions 0.150.2-rc.1", "derivative", "era_cudart", "era_cudart_sys", @@ -6920,7 +6889,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", - "circuit_definitions 0.150.1", + "circuit_definitions 0.150.2-rc.1", "clap 4.5.4", "hex", "indicatif", @@ -6937,7 +6906,7 @@ dependencies = [ "toml_edit 0.14.4", "tracing", "tracing-subscriber", - "zkevm_test_harness 0.150.1", + "zkevm_test_harness 0.150.2-rc.1", "zksync_config", "zksync_env_config", "zksync_prover_fri_types", @@ -7672,31 +7641,27 @@ dependencies = [ [[package]] name = "zkevm_test_harness" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b622fd80164f1d8f9628550c6adf675e51d1e3a859b3762c25e16a40ff5a6d8b" +checksum = "0fdbf14a5793a23aec1b315680b152413a477c8243b7c23a9acf743471b313e4" dependencies = [ "bincode", - "circuit_definitions 0.150.1", - "circuit_sequencer_api 0.150.1", + "circuit_definitions 0.150.2-rc.1", + "circuit_sequencer_api 0.150.2-rc.1", "codegen", "crossbeam 0.8.4", - "curl", "derivative", "env_logger 0.9.3", "hex", - "lazy_static", "rand 0.4.6", "rayon", "regex", - "reqwest 0.11.27", "serde", "serde_json", "smallvec", "structopt", "test-log", "tracing", - "walkdir", "zkevm-assembly 0.150.0", "zksync_kzg", ] @@ -8021,9 
+7986,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.1" +version = "0.150.2-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5af1838466ae06e56064fafa8b4563c3bde44b44839de0b6197c293e03d133fc" +checksum = "4672556b6bc06da9dcd38a607e139b8eb3083edfaabcd12981e8a62051ee1f81" dependencies = [ "boojum", "derivative", @@ -8073,7 +8038,7 @@ dependencies = [ "circuit_sequencer_api 0.140.0", "circuit_sequencer_api 0.141.0", "circuit_sequencer_api 0.142.0", - "circuit_sequencer_api 0.150.1", + "circuit_sequencer_api 0.150.2-rc.1", "hex", "itertools 0.10.5", "once_cell", @@ -8144,7 +8109,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - "circuit_sequencer_api 0.150.1", + "circuit_sequencer_api 0.150.2-rc.1", "clap 4.5.4", "ctrlc", "futures 0.3.30", @@ -8156,7 +8121,7 @@ dependencies = [ "tracing", "vise", "vk_setup_data_generator_server_fri", - "zkevm_test_harness 0.150.1", + "zkevm_test_harness 0.150.2-rc.1", "zksync-wrapper-prover", "zksync_core_leftovers", "zksync_env_config", @@ -8242,7 +8207,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "circuit_definitions 0.150.1", + "circuit_definitions 0.150.2-rc.1", "clap 4.5.4", "ctrlc", "futures 0.3.30", @@ -8255,7 +8220,7 @@ dependencies = [ "tracing", "vise", "vk_setup_data_generator_server_fri", - "zkevm_test_harness 0.150.1", + "zkevm_test_harness 0.150.2-rc.1", "zksync_config", "zksync_core_leftovers", "zksync_env_config", @@ -8299,7 +8264,7 @@ dependencies = [ name = "zksync_prover_fri_types" version = "0.1.0" dependencies = [ - "circuit_definitions 0.150.1", + "circuit_definitions 0.150.2-rc.1", "serde", "zksync_object_store", "zksync_types", @@ -8328,7 +8293,7 @@ name = "zksync_prover_interface" version = "0.1.0" dependencies = [ "chrono", - "circuit_sequencer_api 0.150.1", + "circuit_sequencer_api 0.150.2-rc.1", "serde", "serde_with", "strum", @@ -8506,7 +8471,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - 
"circuit_definitions 0.150.1", + "circuit_definitions 0.150.2-rc.1", "const-decoder", "ctrlc", "futures 0.3.30", @@ -8520,7 +8485,7 @@ dependencies = [ "tracing", "vise", "vk_setup_data_generator_server_fri", - "zkevm_test_harness 0.150.1", + "zkevm_test_harness 0.150.2-rc.1", "zksync_config", "zksync_core_leftovers", "zksync_env_config", diff --git a/prover/Cargo.toml b/prover/Cargo.toml index ffb034059c8a..c06c0774639a 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -56,13 +56,13 @@ tracing-subscriber = { version = "0.3" } vise = "0.1.0" # Proving dependencies -circuit_definitions = "=0.150.1" -circuit_sequencer_api = "=0.150.1" -zkevm_test_harness = "=0.150.1" +circuit_definitions = "=0.150.2-rc.1" +circuit_sequencer_api = "=0.150.2-rc.1" +zkevm_test_harness = "=0.150.2-rc.1" # GPU proving dependencies wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.140.0-gpu-wrapper.0" } -shivini = "=0.150.1" +shivini = "=0.150.2-rc.1" # Core workspace dependencies zksync_multivm = { path = "../core/lib/multivm", version = "0.1.0" } diff --git a/prover/crates/bin/witness_generator/src/basic_circuits.rs b/prover/crates/bin/witness_generator/src/basic_circuits.rs index c17458ab4338..859b8515805a 100644 --- a/prover/crates/bin/witness_generator/src/basic_circuits.rs +++ b/prover/crates/bin/witness_generator/src/basic_circuits.rs @@ -381,7 +381,7 @@ async fn generate_witness( input.vm_run_data.protocol_version, ); - let mut tree = PrecalculatedMerklePathsProvider::new( + let tree = PrecalculatedMerklePathsProvider::new( input.merkle_paths, input.previous_batch_metadata.root_hash.0, ); @@ -428,7 +428,7 @@ async fn generate_witness( MAX_CYCLES_FOR_TX as usize, geometry_config, storage_oracle, - &mut tree, + tree, path, input.eip_4844_blobs.blobs(), |circuit| { From 986141562646c4d96dca205593e48e4d8df46fba Mon Sep 17 00:00:00 2001 From: pompon0 Date: Tue, 23 Jul 2024 21:48:13 +0200 Subject: [PATCH 16/52] feat: added key generation command to EN (#2461) it 
will be used by partners running ENs to populate consensus secrets config. Also drafted the documentation on how to enable gossipnet on EN deployment. Following the instructions using a docker image won't be that simple though. --------- Co-authored-by: Denis Kolegov --- Cargo.lock | 1 + core/bin/external_node/Cargo.toml | 1 + core/bin/external_node/src/config/mod.rs | 17 ++++ core/bin/external_node/src/main.rs | 34 +++++++- core/node/consensus/src/en.rs | 4 + .../external-node/09_decentralization.md | 86 +++++++++++++++++++ .../prepared_configs/mainnet-config.env | 3 + .../mainnet_consensus_config.yaml | 10 +++ .../testnet-sepolia-config.env | 3 + .../testnet_consensus_config.yaml | 10 +++ 10 files changed, 167 insertions(+), 2 deletions(-) create mode 100644 docs/guides/external-node/09_decentralization.md create mode 100644 docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml create mode 100644 docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml diff --git a/Cargo.lock b/Cargo.lock index 7892e3a2e909..40615537255f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8646,6 +8646,7 @@ dependencies = [ "zksync_commitment_generator", "zksync_concurrency", "zksync_config", + "zksync_consensus_crypto", "zksync_consensus_roles", "zksync_consistency_checker", "zksync_contracts", diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index 84c0ddd16e09..63389175912f 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -47,6 +47,7 @@ zksync_vlog.workspace = true zksync_concurrency.workspace = true zksync_consensus_roles.workspace = true +zksync_consensus_crypto.workspace = true vise.workspace = true async-trait.workspace = true diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index 9c4e9657084f..120df5f139fa 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -17,6 +17,8 @@ 
use zksync_config::{ }, ObjectStoreConfig, }; +use zksync_consensus_crypto::TextFmt; +use zksync_consensus_roles as roles; use zksync_core_leftovers::temp_config_store::{decode_yaml_repr, read_yaml_repr}; #[cfg(test)] use zksync_dal::{ConnectionPool, Core}; @@ -1126,6 +1128,21 @@ impl ExperimentalENConfig { } } +/// Generates all possible consensus secrets (from system entropy) +/// and prints them to stdout. +/// They should be copied over to the secrets.yaml/consensus_secrets.yaml file. +pub fn generate_consensus_secrets() { + let validator_key = roles::validator::SecretKey::generate(); + let attester_key = roles::attester::SecretKey::generate(); + let node_key = roles::node::SecretKey::generate(); + println!("# {}", validator_key.public().encode()); + println!("- validator_key: {}", validator_key.encode()); + println!("# {}", attester_key.public().encode()); + println!("- attester_key: {}", attester_key.encode()); + println!("# {}", node_key.public().encode()); + println!("- node_key: {}", node_key.encode()); +} + pub(crate) fn read_consensus_secrets() -> anyhow::Result> { let Ok(path) = env::var("EN_CONSENSUS_SECRETS_PATH") else { return Ok(None); diff --git a/core/bin/external_node/src/main.rs b/core/bin/external_node/src/main.rs index 55b2133250ac..f6696d733482 100644 --- a/core/bin/external_node/src/main.rs +++ b/core/bin/external_node/src/main.rs @@ -54,7 +54,7 @@ use zksync_web3_decl::{ }; use crate::{ - config::ExternalNodeConfig, + config::{generate_consensus_secrets, ExternalNodeConfig}, init::{ensure_storage_initialized, SnapshotRecoveryConfig}, }; @@ -695,10 +695,20 @@ async fn shutdown_components( Ok(()) } +#[derive(Debug, Clone, clap::Subcommand)] +enum Command { + /// Generates consensus secret keys to use in the secrets file. + /// Prints the keys to the stdout, you need to copy the relevant keys into your secrets file. + GenerateSecrets, +} + /// External node for ZKsync Era. 
#[derive(Debug, Parser)] #[command(author = "Matter Labs", version)] struct Cli { + #[command(subcommand)] + command: Option, + /// Enables consensus-based syncing instead of JSON-RPC based one. This is an experimental and incomplete feature; /// do not use unless you know what you're doing. #[arg(long)] @@ -720,7 +730,14 @@ struct Cli { /// Path to the yaml with external node specific configuration. If set, it will be used instead of env vars. #[arg(long, requires = "config_path", requires = "secrets_path")] external_node_config_path: Option, - /// Path to the yaml with consensus. + /// Path to the yaml with consensus config. If set, it will be used instead of env vars. + #[arg( + long, + requires = "config_path", + requires = "secrets_path", + requires = "external_node_config_path", + requires = "enable_consensus" + )] consensus_path: Option, } @@ -778,9 +795,22 @@ async fn main() -> anyhow::Result<()> { // Initial setup. let opt = Cli::parse(); + if let Some(cmd) = &opt.command { + match cmd { + Command::GenerateSecrets => generate_consensus_secrets(), + } + return Ok(()); + } + let mut config = if let Some(config_path) = opt.config_path.clone() { let secrets_path = opt.secrets_path.clone().unwrap(); let external_node_config_path = opt.external_node_config_path.clone().unwrap(); + if opt.enable_consensus { + anyhow::ensure!( + opt.consensus_path.is_some(), + "if --config-path and --enable-consensus are specified, then --consensus-path should be used to specify the location of the consensus config" + ); + } ExternalNodeConfig::from_files( config_path, external_node_config_path, diff --git a/core/node/consensus/src/en.rs b/core/node/consensus/src/en.rs index e2e1ce480dfb..66bdc822c058 100644 --- a/core/node/consensus/src/en.rs +++ b/core/node/consensus/src/en.rs @@ -129,6 +129,10 @@ impl EN { ctx: &ctx::Ctx, actions: ActionQueueSender, ) -> anyhow::Result<()> { + tracing::warn!("\ + WARNING: this node is using ZKsync API synchronization, which will be deprecated 
soon. \ + Please follow this instruction to switch to p2p synchronization: \ + https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/09_decentralization.md"); let res: ctx::Result<()> = scope::run!(ctx, |ctx, s| async { // Update sync state in the background. s.spawn_bg(self.fetch_state_loop(ctx)); diff --git a/docs/guides/external-node/09_decentralization.md b/docs/guides/external-node/09_decentralization.md new file mode 100644 index 000000000000..37cd4c502ef1 --- /dev/null +++ b/docs/guides/external-node/09_decentralization.md @@ -0,0 +1,86 @@ +# Decentralization + +In the default setup the ZKsync node will fetch data from the ZKsync API endpoint maintained by Matter Labs. To reduce +the reliance on this centralized endpoint we have developed a decentralized p2p networking stack (aka gossipnet) which +will eventually be used instead of ZKsync API for synchronizing data. + +On the gossipnet, the data integrity will be protected by the BFT (byzantine fault tolerant) consensus algorithm +(currently data is signed just by the main node though). + +## Enabling gossipnet on your node + +> [!NOTE] +> +> Because the data transmitted over the gossipnet is signed by the main node (and eventually by the consensus quorum), +> the signatures need to be backfilled to the node's local storage the first time you switch from centralized (ZKsync +> API based) synchronization to the decentralized (gossipnet based) synchronization (this is a one-time thing). With the +> current implementation it may take a couple of hours and gets faster the more nodes you add to the +> `gossip_static_outbound` list (see below). We are working to remove this inconvenience. + +### Generating secrets + +Each participant node of the gossipnet has to have an identity (a public/secret key pair). 
When running your node for +the first time, generate the secrets by running: + +``` +cargo run -p zksync_external_node -- generate-secrets > consensus_secrets.yaml +chmod 600 consensus_secrets.yaml +``` + +> [!NOTE] +> +> NEVER reveal the secret keys used by your node. Otherwise someone can impersonate your node on the gossipnet. If you +> suspect that your secret key has been leaked, you can generate fresh keys using the same tool. +> +> If you want someone else to connect to your node, give them your PUBLIC key instead. Both public and secret keys are +> present in the `consensus_secrets.yaml` (public keys are in comments). + +### Preparing configuration file + +Copy the template of the consensus configuration file (for +[mainnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml) +or +[testnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml) +). + +> [!NOTE] +> +> You need to fill in the `public_addr` field. This is the address that will (not implemented yet) be advertised over +> gossipnet to other nodes, so that they can establish connections to your node. If you don't want to expose your node +> to the public internet, you can use IP in your local network. + +Currently the config contains the following fields (refer to config +[schema](https://github.com/matter-labs/zksync-era/blob/990676c5f84afd2ff8cd337f495c82e8d1f305a4/core/lib/protobuf_config/src/proto/core/consensus.proto#L66) +for more details): + +- `server_addr` - local TCP socket address that the node should listen on for incoming connections. Note that this is an + additional TCP port that will be opened by the node. +- `public_addr` - the public address of your node that will be advertised over the gossipnet. +- `max_payload_size` - limit (in bytes) on the sized of the ZKsync ERA block received from the gossipnet. 
This protects + your node from getting DoS`ed by too large network messages. Use the value from the template. +- `gossip_dynamic_inbound_limit` - maximal number of unauthenticated concurrent inbound connections that can be + established to your node. This is a DDoS protection measure. +- `gossip_static_outbound` - list of trusted peers that your node should always try to connect to. The template contains + the nodes maintained by Matterlabs, but you can add more if you know any. Note that the list contains both the network + address AND the public key of the node - this prevents spoofing attacks. + +### Setting environment variables + +Uncomment (or add) the following lines in your `.env` config: + +``` +EN_CONSENSUS_CONFIG_PATH=... +EN_CONSENSUS_SECRETS_PATH=... +``` + +These variables should point to your consensus config and secrets files that we have just created. Tweak the paths to +the files if you have placed them differently. + +### Add `--enable-consensus` flag to your entry point + +For the consensus configuration to take effect you have to add `--enable-consensus` flag to the command line when +running the node: + +``` +cargo run -p zksync_external_node -- --enable-consensus +``` diff --git a/docs/guides/external-node/prepared_configs/mainnet-config.env b/docs/guides/external-node/prepared_configs/mainnet-config.env index 35278205b96f..bce812084665 100644 --- a/docs/guides/external-node/prepared_configs/mainnet-config.env +++ b/docs/guides/external-node/prepared_configs/mainnet-config.env @@ -70,6 +70,9 @@ RUST_LOG=zksync_core=debug,zksync_dal=info,zksync_eth_client=info,zksync_merkle_ RUST_BACKTRACE=full RUST_LIB_BACKTRACE=1 +# Settings related to gossip network, see `09_decentralization.md` +#EN_CONSENSUS_CONFIG_PATH=./mainnet_consensus_config.yaml +#EN_CONSENSUS_SECRETS_PATH=./consensus_secrets.yaml # ------------------------------------------------------------------------ # -------------- THE FOLLOWING VARIABLES DEPEND ON THE ENV --------------- diff 
--git a/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml b/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml new file mode 100644 index 000000000000..6d61ef3963eb --- /dev/null +++ b/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml @@ -0,0 +1,10 @@ +server_addr: '0.0.0.0:3054' +public_addr: ':3054' +max_payload_size: 5000000 +gossip_dynamic_inbound_limit: 100 +gossip_static_outbound: + # preconfigured ENs owned by Matterlabs that you can connect to + - key: 'node:public:ed25519:68d29127ab03408bf5c838553b19c32bdb3aaaae9bf293e5e078c3a0d265822a' + addr: 'external-node-consensus-mainnet.zksync.dev:3054' + - key: 'node:public:ed25519:b521e1bb173d04bc83d46b859d1296378e94a40427a6beb9e7fdd17cbd934c11' + addr: 'external-node-moby-consensus-mainnet.zksync.dev:3054' diff --git a/docs/guides/external-node/prepared_configs/testnet-sepolia-config.env b/docs/guides/external-node/prepared_configs/testnet-sepolia-config.env index 98e2ee6bd510..182012e2850c 100644 --- a/docs/guides/external-node/prepared_configs/testnet-sepolia-config.env +++ b/docs/guides/external-node/prepared_configs/testnet-sepolia-config.env @@ -70,6 +70,9 @@ RUST_LOG=zksync_core=debug,zksync_dal=info,zksync_eth_client=info,zksync_merkle_ RUST_BACKTRACE=full RUST_LIB_BACKTRACE=1 +# Settings related to gossip network, see `09_decentralization.md` +#EN_CONSENSUS_CONFIG_PATH=./testnet_consensus_config.yaml +#EN_CONSENSUS_SECRETS_PATH=./consensus_secrets.yaml # ------------------------------------------------------------------------ # -------------- THE FOLLOWING VARIABLES DEPEND ON THE ENV --------------- diff --git a/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml b/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml new file mode 100644 index 000000000000..25461b5dfc45 --- /dev/null +++ b/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml @@ -0,0 +1,10 @@ +server_addr: 
'0.0.0.0:3054' +public_addr: ':3054' +max_payload_size: 5000000 +gossip_dynamic_inbound_limit: 100 +gossip_static_outbound: + # preconfigured ENs owned by Matterlabs that you can connect to + - key: 'node:public:ed25519:4a94067664e7b8d0927ab1443491dab71a1d0c63f861099e1852f2b6d0831c3e' + addr: 'external-node-consensus-sepolia.zksync.dev:3054' + - key: 'node:public:ed25519:cfbbebc74127099680584f07a051a2573e2dd7463abdd000d31aaa44a7985045' + addr: 'external-node-moby-consensus-sepolia.zksync.dev:3054' From 5eab94c76b8384ebd963a11418335ca09dc5a033 Mon Sep 17 00:00:00 2001 From: zksync-era-bot <147085853+zksync-era-bot@users.noreply.github.com> Date: Wed, 24 Jul 2024 09:59:46 +0300 Subject: [PATCH 17/52] chore(main): release core 24.11.0 (#2459) :robot: I have created a release *beep* *boop* --- ## [24.11.0](https://github.com/matter-labs/zksync-era/compare/core-v24.10.0...core-v24.11.0) (2024-07-23) ### Features * add revert tests (external node) to zk_toolbox ([#2408](https://github.com/matter-labs/zksync-era/issues/2408)) ([3fbbee1](https://github.com/matter-labs/zksync-era/commit/3fbbee10be99e8c5a696bfd50d81230141bccbf4)) * add state override for gas estimates ([#1358](https://github.com/matter-labs/zksync-era/issues/1358)) ([761bda1](https://github.com/matter-labs/zksync-era/commit/761bda19844fb3935f8a57c47df39010f88ef9dc)) * added consensus_config to general config ([#2462](https://github.com/matter-labs/zksync-era/issues/2462)) ([c5650a4](https://github.com/matter-labs/zksync-era/commit/c5650a4f1747f59d7a2d4e1986a91ae3fa7d75b0)) * added key generation command to EN ([#2461](https://github.com/matter-labs/zksync-era/issues/2461)) ([9861415](https://github.com/matter-labs/zksync-era/commit/986141562646c4d96dca205593e48e4d8df46fba)) * remove leftovers after BWIP ([#2456](https://github.com/matter-labs/zksync-era/issues/2456)) ([990676c](https://github.com/matter-labs/zksync-era/commit/990676c5f84afd2ff8cd337f495c82e8d1f305a4)) --- This PR was generated with [Release 
Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --------- Co-authored-by: zksync-era-bot --- .github/release-please/manifest.json | 2 +- Cargo.lock | 2 +- core/CHANGELOG.md | 11 +++++++++++ core/bin/external_node/Cargo.toml | 2 +- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index 058b522b417e..a0344676df21 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,4 +1,4 @@ { - "core": "24.10.0", + "core": "24.11.0", "prover": "16.0.0" } diff --git a/Cargo.lock b/Cargo.lock index 40615537255f..7319999316be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8624,7 +8624,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "24.10.0" +version = "24.11.0" dependencies = [ "anyhow", "assert_matches", diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index 45182e704e5a..d9a944c7efe3 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## [24.11.0](https://github.com/matter-labs/zksync-era/compare/core-v24.10.0...core-v24.11.0) (2024-07-23) + + +### Features + +* add revert tests (external node) to zk_toolbox ([#2408](https://github.com/matter-labs/zksync-era/issues/2408)) ([3fbbee1](https://github.com/matter-labs/zksync-era/commit/3fbbee10be99e8c5a696bfd50d81230141bccbf4)) +* add state override for gas estimates ([#1358](https://github.com/matter-labs/zksync-era/issues/1358)) ([761bda1](https://github.com/matter-labs/zksync-era/commit/761bda19844fb3935f8a57c47df39010f88ef9dc)) +* added consensus_config to general config ([#2462](https://github.com/matter-labs/zksync-era/issues/2462)) ([c5650a4](https://github.com/matter-labs/zksync-era/commit/c5650a4f1747f59d7a2d4e1986a91ae3fa7d75b0)) +* added key generation command to EN ([#2461](https://github.com/matter-labs/zksync-era/issues/2461)) 
([9861415](https://github.com/matter-labs/zksync-era/commit/986141562646c4d96dca205593e48e4d8df46fba)) +* remove leftovers after BWIP ([#2456](https://github.com/matter-labs/zksync-era/issues/2456)) ([990676c](https://github.com/matter-labs/zksync-era/commit/990676c5f84afd2ff8cd337f495c82e8d1f305a4)) + ## [24.10.0](https://github.com/matter-labs/zksync-era/compare/core-v24.9.0...core-v24.10.0) (2024-07-22) diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index 63389175912f..c3e8a4bb18e2 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "24.10.0" # x-release-please-version +version = "24.11.0" # x-release-please-version edition.workspace = true authors.workspace = true homepage.workspace = true From 8cf8fc741dc0857fdf5a8cd1e6c3f716bdb114f5 Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Wed, 24 Jul 2024 11:01:47 +0300 Subject: [PATCH 18/52] refactor(api): Brush up VM storage overrides (#2463) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Brush up VM storage overrides as introduced in https://github.com/matter-labs/zksync-era/pull/1358. ## Why ❔ The overrides implementation looks overly complex and isn't correctly localized by domain (located in the `state` crate, while the functionality is API server-specific). This worsens maintainability. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. 
--- core/lib/state/src/lib.rs | 9 - core/lib/state/src/storage_overrides.rs | 150 ------------- core/lib/state/src/storage_view.rs | 10 +- core/lib/types/src/api/state_override.rs | 176 +++++++++++++-- core/lib/vm_utils/src/lib.rs | 9 +- .../api_server/src/execution_sandbox/apply.rs | 34 ++- .../api_server/src/execution_sandbox/mod.rs | 1 + .../src/execution_sandbox/storage.rs | 201 ++++++++++++++++++ core/node/api_server/src/tx_sender/mod.rs | 4 +- core/node/api_server/src/web3/tests/vm.rs | 16 +- 10 files changed, 386 insertions(+), 224 deletions(-) create mode 100644 core/node/api_server/src/execution_sandbox/storage.rs diff --git a/core/lib/state/src/lib.rs b/core/lib/state/src/lib.rs index 74c60e4a3695..66577841fd45 100644 --- a/core/lib/state/src/lib.rs +++ b/core/lib/state/src/lib.rs @@ -12,7 +12,6 @@ use std::{cell::RefCell, collections::HashMap, fmt, rc::Rc}; use zksync_types::{ - api::state_override::StateOverride, get_known_code_key, storage::{StorageKey, StorageValue}, H256, @@ -30,7 +29,6 @@ pub use self::{ }, shadow_storage::ShadowStorage, storage_factory::{BatchDiff, PgOrRocksdbStorage, ReadStorageFactory, RocksdbWithMemory}, - storage_overrides::StorageOverrides, storage_view::{StorageView, StorageViewCache, StorageViewMetrics}, witness::WitnessStorage, }; @@ -42,7 +40,6 @@ mod postgres; mod rocksdb; mod shadow_storage; mod storage_factory; -mod storage_overrides; mod storage_view; #[cfg(test)] mod test_utils; @@ -92,9 +89,3 @@ pub trait WriteStorage: ReadStorage { /// Smart pointer to [`WriteStorage`]. pub type StoragePtr = Rc>; - -/// Functionality to override the storage state. -pub trait OverrideStorage { - /// Apply state override to the storage. 
- fn apply_state_override(&mut self, overrides: &StateOverride); -} diff --git a/core/lib/state/src/storage_overrides.rs b/core/lib/state/src/storage_overrides.rs index f45dd6d3382f..e69de29bb2d1 100644 --- a/core/lib/state/src/storage_overrides.rs +++ b/core/lib/state/src/storage_overrides.rs @@ -1,150 +0,0 @@ -use std::{cell::RefCell, collections::HashMap, fmt, rc::Rc}; - -use zksync_types::{ - api::state_override::{OverrideState, StateOverride}, - get_code_key, get_nonce_key, - utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, - AccountTreeId, StorageKey, StorageValue, H256, U256, -}; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; - -use crate::{OverrideStorage, ReadStorage}; - -/// A storage view that allows to override some of the storage values. -#[derive(Debug)] -pub struct StorageOverrides { - storage_handle: S, - overridden_factory_deps: HashMap>, - overridden_account_state: HashMap>, - overridden_account_state_diff: HashMap>, - overridden_balance: HashMap, - overridden_nonce: HashMap, - overridden_code: HashMap, -} - -impl StorageOverrides { - /// Creates a new storage view based on the underlying storage. - pub fn new(storage: S) -> Self { - Self { - storage_handle: storage, - overridden_factory_deps: HashMap::new(), - overridden_account_state: HashMap::new(), - overridden_account_state_diff: HashMap::new(), - overridden_balance: HashMap::new(), - overridden_nonce: HashMap::new(), - overridden_code: HashMap::new(), - } - } - - /// Overrides a factory dependency code. - pub fn store_factory_dep(&mut self, hash: H256, code: Vec) { - self.overridden_factory_deps.insert(hash, code); - } - - /// Overrides an account entire state. - pub fn override_account_state(&mut self, account: AccountTreeId, state: HashMap) { - self.overridden_account_state.insert(account, state); - } - - /// Overrides an account state diff. 
- pub fn override_account_state_diff( - &mut self, - account: AccountTreeId, - state_diff: HashMap, - ) { - self.overridden_account_state_diff - .insert(account, state_diff); - } - - /// Make a Rc RefCell ptr to the storage - pub fn to_rc_ptr(self) -> Rc> { - Rc::new(RefCell::new(self)) - } -} - -impl ReadStorage for StorageOverrides { - fn read_value(&mut self, key: &StorageKey) -> StorageValue { - if let Some(balance) = self.overridden_balance.get(key) { - return u256_to_h256(*balance); - } - if let Some(code) = self.overridden_code.get(key) { - return *code; - } - - if let Some(nonce) = self.overridden_nonce.get(key) { - return u256_to_h256(*nonce); - } - - if let Some(account_state) = self.overridden_account_state.get(key.account()) { - if let Some(value) = account_state.get(key.key()) { - return *value; - } - return H256::zero(); - } - - if let Some(account_state_diff) = self.overridden_account_state_diff.get(key.account()) { - if let Some(value) = account_state_diff.get(key.key()) { - return *value; - } - } - - self.storage_handle.read_value(key) - } - - fn is_write_initial(&mut self, key: &StorageKey) -> bool { - self.storage_handle.is_write_initial(key) - } - - fn load_factory_dep(&mut self, hash: H256) -> Option> { - self.overridden_factory_deps - .get(&hash) - .cloned() - .or_else(|| self.storage_handle.load_factory_dep(hash)) - } - - fn get_enumeration_index(&mut self, key: &StorageKey) -> Option { - self.storage_handle.get_enumeration_index(key) - } -} - -impl OverrideStorage for StorageOverrides { - fn apply_state_override(&mut self, state_override: &StateOverride) { - for (account, overrides) in state_override.iter() { - if let Some(balance) = overrides.balance { - let balance_key = storage_key_for_eth_balance(account); - self.overridden_balance.insert(balance_key, balance); - } - - if let Some(nonce) = overrides.nonce { - let nonce_key = get_nonce_key(account); - let full_nonce = self.read_value(&nonce_key); - let (_, deployment_nonce) = 
decompose_full_nonce(h256_to_u256(full_nonce)); - let new_full_nonce = nonces_to_full_nonce(nonce, deployment_nonce); - self.overridden_nonce.insert(nonce_key, new_full_nonce); - } - - if let Some(code) = &overrides.code { - let code_key = get_code_key(account); - let code_hash = hash_bytecode(&code.0); - self.overridden_code.insert(code_key, code_hash); - self.store_factory_dep(code_hash, code.0.clone()); - } - - match &overrides.state { - Some(OverrideState::State(state)) => { - self.override_account_state(AccountTreeId::new(*account), state.clone()); - } - Some(OverrideState::StateDiff(state_diff)) => { - for (key, value) in state_diff { - let account_state = self - .overridden_account_state_diff - .entry(AccountTreeId::new(*account)) - .or_default(); - account_state.insert(*key, *value); - } - } - None => {} - } - } - } -} diff --git a/core/lib/state/src/storage_view.rs b/core/lib/state/src/storage_view.rs index 4d79298101f4..7dcfda2ba406 100644 --- a/core/lib/state/src/storage_view.rs +++ b/core/lib/state/src/storage_view.rs @@ -6,9 +6,9 @@ use std::{ time::{Duration, Instant}, }; -use zksync_types::{api::state_override::StateOverride, StorageKey, StorageValue, H256}; +use zksync_types::{StorageKey, StorageValue, H256}; -use crate::{OverrideStorage, ReadStorage, WriteStorage}; +use crate::{ReadStorage, WriteStorage}; /// Metrics for [`StorageView`]. 
#[derive(Debug, Default, Clone, Copy)] @@ -224,12 +224,6 @@ impl WriteStorage for StorageView { } } -impl OverrideStorage for StorageView { - fn apply_state_override(&mut self, state_override: &StateOverride) { - self.storage_handle.apply_state_override(state_override); - } -} - #[cfg(test)] mod test { use zksync_types::{AccountTreeId, Address, H256}; diff --git a/core/lib/types/src/api/state_override.rs b/core/lib/types/src/api/state_override.rs index 5c2395ae4bf2..a2497a65c533 100644 --- a/core/lib/types/src/api/state_override.rs +++ b/core/lib/types/src/api/state_override.rs @@ -1,26 +1,81 @@ -use std::{collections::HashMap, ops::Deref}; +use std::collections::HashMap; -use serde::{Deserialize, Deserializer, Serialize}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use zksync_basic_types::{web3::Bytes, H256, U256}; +use zksync_utils::bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}; use crate::Address; -/// Collection of overridden accounts -#[derive(Debug, Clone, Serialize, Deserialize)] +/// Collection of overridden accounts. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct StateOverride(HashMap); +impl StateOverride { + /// Wraps the provided account overrides. + pub fn new(state: HashMap) -> Self { + Self(state) + } + + /// Gets overrides for the specified account. + pub fn get(&self, address: &Address) -> Option<&OverrideAccount> { + self.0.get(address) + } + + /// Iterates over all account overrides. + pub fn iter(&self) -> impl Iterator + '_ { + self.0.iter() + } +} + +/// Serialized bytecode representation. +#[derive(Debug, Clone, PartialEq)] +pub struct Bytecode(Bytes); + +impl Bytecode { + pub fn new(bytes: Vec) -> Result { + validate_bytecode(&bytes)?; + Ok(Self(Bytes(bytes))) + } + + /// Returns the canonical hash of this bytecode. + pub fn hash(&self) -> H256 { + hash_bytecode(&self.0 .0) + } + + /// Converts this bytecode into bytes. 
+ pub fn into_bytes(self) -> Vec { + self.0 .0 + } +} + +impl Serialize for Bytecode { + fn serialize(&self, serializer: S) -> Result { + self.0.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for Bytecode { + fn deserialize>(deserializer: D) -> Result { + let bytes = Bytes::deserialize(deserializer)?; + validate_bytecode(&bytes.0).map_err(de::Error::custom)?; + Ok(Self(bytes)) + } +} + /// Account override for `eth_estimateGas`. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] #[serde(rename_all = "camelCase")] pub struct OverrideAccount { pub balance: Option, pub nonce: Option, - pub code: Option, + pub code: Option, #[serde(flatten, deserialize_with = "state_deserializer")] pub state: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] #[serde(rename_all = "camelCase")] pub enum OverrideState { State(HashMap), @@ -33,11 +88,11 @@ where { let val = serde_json::Value::deserialize(deserializer)?; let state: Option> = match val.get("state") { - Some(val) => serde_json::from_value(val.clone()).map_err(serde::de::Error::custom)?, + Some(val) => serde_json::from_value(val.clone()).map_err(de::Error::custom)?, None => None, }; let state_diff: Option> = match val.get("stateDiff") { - Some(val) => serde_json::from_value(val.clone()).map_err(serde::de::Error::custom)?, + Some(val) => serde_json::from_value(val.clone()).map_err(de::Error::custom)?, None => None, }; @@ -45,26 +100,109 @@ where (Some(state), None) => Ok(Some(OverrideState::State(state))), (None, Some(state_diff)) => Ok(Some(OverrideState::StateDiff(state_diff))), (None, None) => Ok(None), - _ => Err(serde::de::Error::custom( + _ => Err(de::Error::custom( "Both 'state' and 'stateDiff' cannot be set simultaneously", )), } } -impl StateOverride { - pub fn new(state: HashMap) -> Self { - Self(state) +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn 
deserializing_bytecode() { + let bytecode_str = "0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; + let json = serde_json::Value::String(bytecode_str.to_owned()); + let bytecode: Bytecode = serde_json::from_value(json).unwrap(); + assert_ne!(bytecode.hash(), H256::zero()); + let bytecode = bytecode.into_bytes(); + assert_eq!(bytecode.len(), 32); + assert_eq!(bytecode[0], 0x01); + assert_eq!(bytecode[31], 0xef); } - pub fn get(&self, address: &Address) -> Option<&OverrideAccount> { - self.0.get(address) + #[test] + fn deserializing_invalid_bytecode() { + let invalid_bytecodes = [ + "1234", // not 0x-prefixed + "0x1234", // length not divisible by 32 + "0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef\ + 0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", // even number of words + ]; + for bytecode_str in invalid_bytecodes { + let json = serde_json::Value::String(bytecode_str.to_owned()); + serde_json::from_value::(json).unwrap_err(); + } + + let long_bytecode = String::from("0x") + + &"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef".repeat(65_537); + let json = serde_json::Value::String(long_bytecode); + serde_json::from_value::(json).unwrap_err(); } -} -impl Deref for StateOverride { - type Target = HashMap; + #[test] + fn deserializing_state_override() { + let json = serde_json::json!({ + "0x0123456789abcdef0123456789abcdef01234567": { + "balance": "0x123", + "nonce": "0x1", + }, + "0x123456789abcdef0123456789abcdef012345678": { + "stateDiff": { + "0x0000000000000000000000000000000000000000000000000000000000000000": + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x0000000000000000000000000000000000000000000000000000000000000001": + "0x0000000000000000000000000000000000000000000000000000000000000002", + } + } + }); + + let state_override: StateOverride = serde_json::from_value(json).unwrap(); + assert_eq!(state_override.0.len(), 2); + + let first_address: Address = 
"0x0123456789abcdef0123456789abcdef01234567" + .parse() + .unwrap(); + let first_override = &state_override.0[&first_address]; + assert_eq!( + *first_override, + OverrideAccount { + balance: Some(0x123.into()), + nonce: Some(1.into()), + ..OverrideAccount::default() + } + ); + + let second_address: Address = "0x123456789abcdef0123456789abcdef012345678" + .parse() + .unwrap(); + let second_override = &state_override.0[&second_address]; + assert_eq!( + *second_override, + OverrideAccount { + state: Some(OverrideState::StateDiff(HashMap::from([ + (H256::from_low_u64_be(0), H256::from_low_u64_be(1)), + (H256::from_low_u64_be(1), H256::from_low_u64_be(2)), + ]))), + ..OverrideAccount::default() + } + ); + } - fn deref(&self) -> &Self::Target { - &self.0 + #[test] + fn deserializing_bogus_account_override() { + let json = serde_json::json!({ + "state": { + "0x0000000000000000000000000000000000000000000000000000000000000001": + "0x0000000000000000000000000000000000000000000000000000000000000002", + }, + "stateDiff": { + "0x0000000000000000000000000000000000000000000000000000000000000000": + "0x0000000000000000000000000000000000000000000000000000000000000001", + }, + }); + let err = serde_json::from_value::(json).unwrap_err(); + assert!(err.to_string().contains("'state' and 'stateDiff'"), "{err}"); } } diff --git a/core/lib/vm_utils/src/lib.rs b/core/lib/vm_utils/src/lib.rs index b970d1a8c6b3..9cec0e13be8b 100644 --- a/core/lib/vm_utils/src/lib.rs +++ b/core/lib/vm_utils/src/lib.rs @@ -8,14 +8,14 @@ use zksync_multivm::{ vm_latest::HistoryEnabled, VmInstance, }; -use zksync_state::{PostgresStorage, StorageOverrides, StoragePtr, StorageView, WriteStorage}; +use zksync_state::{PostgresStorage, StoragePtr, StorageView, WriteStorage}; use zksync_types::{L1BatchNumber, L2ChainId, Transaction}; use crate::storage::L1BatchParamsProvider; pub type VmAndStorage<'a> = ( - VmInstance>>, HistoryEnabled>, - StoragePtr>>>, + VmInstance>, HistoryEnabled>, + StoragePtr>>, ); pub fn 
create_vm( @@ -52,8 +52,7 @@ pub fn create_vm( let storage_l2_block_number = first_l2_block_in_batch.number() - 1; let pg_storage = PostgresStorage::new(rt_handle.clone(), connection, storage_l2_block_number, true); - let storage_overrides = StorageOverrides::new(pg_storage); - let storage_view = StorageView::new(storage_overrides).to_rc_ptr(); + let storage_view = StorageView::new(pg_storage).to_rc_ptr(); let vm = VmInstance::new(l1_batch_env, system_env, storage_view.clone()); Ok((vm, storage_view)) diff --git a/core/node/api_server/src/execution_sandbox/apply.rs b/core/node/api_server/src/execution_sandbox/apply.rs index c30e5bc36c86..a65538e25025 100644 --- a/core/node/api_server/src/execution_sandbox/apply.rs +++ b/core/node/api_server/src/execution_sandbox/apply.rs @@ -17,10 +17,7 @@ use zksync_multivm::{ vm_latest::{constants::BATCH_COMPUTATIONAL_GAS_LIMIT, HistoryDisabled}, VmInstance, }; -use zksync_state::{ - OverrideStorage, PostgresStorage, ReadStorage, StorageOverrides, StoragePtr, StorageView, - WriteStorage, -}; +use zksync_state::{PostgresStorage, ReadStorage, StoragePtr, StorageView, WriteStorage}; use zksync_system_constants::{ SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, ZKPORTER_IS_AVAILABLE, @@ -37,12 +34,13 @@ use zksync_types::{ use zksync_utils::{h256_to_u256, time::seconds_since_epoch, u256_to_h256}; use super::{ + storage::StorageWithOverrides, vm_metrics::{self, SandboxStage, SANDBOX_METRICS}, BlockArgs, TxExecutionArgs, TxSharedArgs, VmPermit, }; -type BoxedVm<'a> = - Box>>, HistoryDisabled>>; +type VmStorageView<'a> = StorageView>>; +type BoxedVm<'a> = Box, HistoryDisabled>>; #[derive(Debug)] struct Sandbox<'a> { @@ -50,7 +48,7 @@ struct Sandbox<'a> { l1_batch_env: L1BatchEnv, execution_args: &'a TxExecutionArgs, l2_block_info_to_reset: Option, - storage_view: StorageView>>, + storage_view: VmStorageView<'a>, } impl<'a> Sandbox<'a> { @@ -59,6 +57,7 @@ impl<'a> 
Sandbox<'a> { shared_args: TxSharedArgs, execution_args: &'a TxExecutionArgs, block_args: BlockArgs, + state_override: &StateOverride, ) -> anyhow::Result> { let resolve_started_at = Instant::now(); let resolved_block_info = block_args @@ -94,9 +93,8 @@ impl<'a> Sandbox<'a> { .context("cannot create `PostgresStorage`")? .with_caches(shared_args.caches.clone()); - let storage_overrides = StorageOverrides::new(storage); - - let storage_view = StorageView::new(storage_overrides); + let storage_with_overrides = StorageWithOverrides::new(storage, state_override); + let storage_view = StorageView::new(storage_with_overrides); let (system_env, l1_batch_env) = Self::prepare_env( shared_args, execution_args, @@ -265,16 +263,7 @@ impl<'a> Sandbox<'a> { mut self, tx: &Transaction, adjust_pubdata_price: bool, - state_override: Option, - ) -> ( - BoxedVm<'a>, - StoragePtr>>>, - ) { - // Apply state override - if let Some(state_override) = state_override { - // Apply the state override - self.storage_view.apply_state_override(&state_override); - } + ) -> (BoxedVm<'a>, StoragePtr>) { self.setup_storage_view(tx); let protocol_version = self.system_env.version; if adjust_pubdata_price { @@ -312,7 +301,7 @@ pub(super) fn apply_vm_in_sandbox( block_args: BlockArgs, // Block arguments for the transaction. 
state_override: Option, apply: impl FnOnce( - &mut VmInstance>>, HistoryDisabled>, + &mut VmInstance, HistoryDisabled>, Transaction, ProtocolVersionId, ) -> T, @@ -335,9 +324,10 @@ pub(super) fn apply_vm_in_sandbox( shared_args, execution_args, block_args, + state_override.as_ref().unwrap_or(&StateOverride::default()), ))?; let protocol_version = sandbox.system_env.version; - let (mut vm, storage_view) = sandbox.into_vm(&tx, adjust_pubdata_price, state_override); + let (mut vm, storage_view) = sandbox.into_vm(&tx, adjust_pubdata_price); SANDBOX_METRICS.sandbox[&SandboxStage::Initialization].observe(stage_started_at.elapsed()); span.exit(); diff --git a/core/node/api_server/src/execution_sandbox/mod.rs b/core/node/api_server/src/execution_sandbox/mod.rs index 72c6ba9789f5..f7c876679cb0 100644 --- a/core/node/api_server/src/execution_sandbox/mod.rs +++ b/core/node/api_server/src/execution_sandbox/mod.rs @@ -26,6 +26,7 @@ use super::tx_sender::MultiVMBaseSystemContracts; mod apply; mod error; mod execute; +mod storage; pub mod testonly; #[cfg(test)] mod tests; diff --git a/core/node/api_server/src/execution_sandbox/storage.rs b/core/node/api_server/src/execution_sandbox/storage.rs new file mode 100644 index 000000000000..749945b4e341 --- /dev/null +++ b/core/node/api_server/src/execution_sandbox/storage.rs @@ -0,0 +1,201 @@ +//! VM storage functionality specifically used in the VM sandbox. + +use std::{ + collections::{HashMap, HashSet}, + fmt, +}; + +use zksync_state::ReadStorage; +use zksync_types::{ + api::state_override::{OverrideState, StateOverride}, + get_code_key, get_nonce_key, + utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, + AccountTreeId, StorageKey, StorageValue, H256, +}; +use zksync_utils::{h256_to_u256, u256_to_h256}; + +/// A storage view that allows to override some of the storage values. 
+#[derive(Debug)] +pub(super) struct StorageWithOverrides { + storage_handle: S, + overridden_slots: HashMap, + overridden_factory_deps: HashMap>, + overridden_accounts: HashSet, +} + +impl StorageWithOverrides { + /// Creates a new storage view based on the underlying storage. + pub(super) fn new(storage: S, state_override: &StateOverride) -> Self { + let mut this = Self { + storage_handle: storage, + overridden_slots: HashMap::new(), + overridden_factory_deps: HashMap::new(), + overridden_accounts: HashSet::new(), + }; + this.apply_state_override(state_override); + this + } + + fn apply_state_override(&mut self, state_override: &StateOverride) { + for (account, overrides) in state_override.iter() { + if let Some(balance) = overrides.balance { + let balance_key = storage_key_for_eth_balance(account); + self.overridden_slots + .insert(balance_key, u256_to_h256(balance)); + } + + if let Some(nonce) = overrides.nonce { + let nonce_key = get_nonce_key(account); + let full_nonce = self.read_value(&nonce_key); + let (_, deployment_nonce) = decompose_full_nonce(h256_to_u256(full_nonce)); + let new_full_nonce = u256_to_h256(nonces_to_full_nonce(nonce, deployment_nonce)); + self.overridden_slots.insert(nonce_key, new_full_nonce); + } + + if let Some(code) = &overrides.code { + let code_key = get_code_key(account); + let code_hash = code.hash(); + self.overridden_slots.insert(code_key, code_hash); + self.store_factory_dep(code_hash, code.clone().into_bytes()); + } + + match &overrides.state { + Some(OverrideState::State(state)) => { + let account = AccountTreeId::new(*account); + self.override_account_state_diff(account, state); + self.overridden_accounts.insert(account); + } + Some(OverrideState::StateDiff(state_diff)) => { + let account = AccountTreeId::new(*account); + self.override_account_state_diff(account, state_diff); + } + None => { /* do nothing */ } + } + } + } + + fn store_factory_dep(&mut self, hash: H256, code: Vec) { + 
self.overridden_factory_deps.insert(hash, code); + } + + fn override_account_state_diff( + &mut self, + account: AccountTreeId, + state_diff: &HashMap, + ) { + let account_slots = state_diff + .iter() + .map(|(&slot, &value)| (StorageKey::new(account, slot), value)); + self.overridden_slots.extend(account_slots); + } +} + +impl ReadStorage for StorageWithOverrides { + fn read_value(&mut self, key: &StorageKey) -> StorageValue { + if let Some(value) = self.overridden_slots.get(key) { + return *value; + } + if self.overridden_accounts.contains(key.account()) { + return H256::zero(); + } + self.storage_handle.read_value(key) + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + self.storage_handle.is_write_initial(key) + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + self.overridden_factory_deps + .get(&hash) + .cloned() + .or_else(|| self.storage_handle.load_factory_dep(hash)) + } + + fn get_enumeration_index(&mut self, key: &StorageKey) -> Option { + self.storage_handle.get_enumeration_index(key) + } +} + +#[cfg(test)] +mod tests { + use zksync_state::InMemoryStorage; + use zksync_types::{ + api::state_override::{Bytecode, OverrideAccount}, + Address, + }; + + use super::*; + + #[test] + fn override_basics() { + let overrides = StateOverride::new(HashMap::from([ + ( + Address::repeat_byte(1), + OverrideAccount { + balance: Some(1.into()), + ..OverrideAccount::default() + }, + ), + ( + Address::repeat_byte(2), + OverrideAccount { + nonce: Some(2.into()), + ..OverrideAccount::default() + }, + ), + ( + Address::repeat_byte(3), + OverrideAccount { + code: Some(Bytecode::new((0..32).collect()).unwrap()), + ..OverrideAccount::default() + }, + ), + ( + Address::repeat_byte(4), + OverrideAccount { + state: Some(OverrideState::StateDiff(HashMap::from([( + H256::zero(), + H256::repeat_byte(1), + )]))), + ..OverrideAccount::default() + }, + ), + ( + Address::repeat_byte(5), + OverrideAccount { + state: Some(OverrideState::State(HashMap::new())), 
+ ..OverrideAccount::default() + }, + ), + ])); + + let mut storage = InMemoryStorage::default(); + let overridden_key = + StorageKey::new(AccountTreeId::new(Address::repeat_byte(4)), H256::zero()); + storage.set_value(overridden_key, H256::repeat_byte(0xff)); + let retained_key = StorageKey::new( + AccountTreeId::new(Address::repeat_byte(4)), + H256::from_low_u64_be(1), + ); + storage.set_value(retained_key, H256::repeat_byte(0xfe)); + let erased_key = StorageKey::new(AccountTreeId::new(Address::repeat_byte(5)), H256::zero()); + storage.set_value(erased_key, H256::repeat_byte(1)); + let mut storage = StorageWithOverrides::new(storage, &overrides); + + let balance = storage.read_value(&storage_key_for_eth_balance(&Address::repeat_byte(1))); + assert_eq!(balance, H256::from_low_u64_be(1)); + let nonce = storage.read_value(&get_nonce_key(&Address::repeat_byte(2))); + assert_eq!(nonce, H256::from_low_u64_be(2)); + let code_hash = storage.read_value(&get_code_key(&Address::repeat_byte(3))); + assert_ne!(code_hash, H256::zero()); + assert!(storage.load_factory_dep(code_hash).is_some()); + + let overridden_value = storage.read_value(&overridden_key); + assert_eq!(overridden_value, H256::repeat_byte(1)); + let retained_value = storage.read_value(&retained_key); + assert_eq!(retained_value, H256::repeat_byte(0xfe)); + let erased_value = storage.read_value(&erased_key); + assert_eq!(erased_value, H256::zero()); + } +} diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index 15f9271d6428..38939937fcda 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -797,7 +797,7 @@ impl TxSender { .and_then(|overrides| overrides.get(&tx.initiator_account())) .and_then(|account| account.balance) { - Some(balance) => balance.to_owned(), + Some(balance) => balance, None => self.get_balance(&tx.initiator_account()).await?, }; @@ -805,7 +805,7 @@ impl TxSender { tracing::info!( "fee 
estimation failed on validation step. account: {} does not have enough funds for for transferring tx.value: {}.", - &tx.initiator_account(), + tx.initiator_account(), tx.execute.value ); return Err(SubmitTxError::InsufficientFundsForTransfer); diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index 61c24bcf9001..50de027174f3 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -695,20 +695,18 @@ impl HttpTest for EstimateGasWithStateOverrideTest { // Transaction with balance override let l2_transaction = create_l2_transaction(10, 100); let mut call_request = CallRequest::from(l2_transaction); - call_request.from = Some(Address::random()); + let request_initiator = Address::random(); + call_request.from = Some(request_initiator); call_request.value = Some(1_000_000.into()); - let mut state_override_map = HashMap::new(); - state_override_map.insert( - call_request.from.unwrap(), + let state_override = HashMap::from([( + request_initiator, OverrideAccount { balance: Some(U256::max_value()), - nonce: None, - code: None, - state: None, + ..OverrideAccount::default() }, - ); - let state_override = StateOverride::new(state_override_map); + )]); + let state_override = StateOverride::new(state_override); client .estimate_gas(call_request.clone(), None, Some(state_override)) From 513b56ef983c7e8343f6784f579a8fb32728a238 Mon Sep 17 00:00:00 2001 From: zksync-era-bot <147085853+zksync-era-bot@users.noreply.github.com> Date: Wed, 24 Jul 2024 11:55:38 +0300 Subject: [PATCH 19/52] chore(main): release prover 16.1.0 (#2452) :robot: I have created a release *beep* *boop* --- ## [16.1.0](https://github.com/matter-labs/zksync-era/compare/prover-v16.0.0...prover-v16.1.0) (2024-07-24) ### Features * **prover:** Make it possible to run prover out of GCP ([#2448](https://github.com/matter-labs/zksync-era/issues/2448)) 
([c9da549](https://github.com/matter-labs/zksync-era/commit/c9da5497e2aa9d85f204ab7b74fefcfe941793ff)) * remove leftovers after BWIP ([#2456](https://github.com/matter-labs/zksync-era/issues/2456)) ([990676c](https://github.com/matter-labs/zksync-era/commit/990676c5f84afd2ff8cd337f495c82e8d1f305a4)) ### Bug Fixes * **prover:** BWG optimizations ([#2469](https://github.com/matter-labs/zksync-era/issues/2469)) ([d8851c8](https://github.com/matter-labs/zksync-era/commit/d8851c8af2cd4b595f4edb9c36c81e2310835a77)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- .github/release-please/manifest.json | 2 +- prover/CHANGELOG.md | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index a0344676df21..a26bd9fb6b09 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,4 +1,4 @@ { "core": "24.11.0", - "prover": "16.0.0" + "prover": "16.1.0" } diff --git a/prover/CHANGELOG.md b/prover/CHANGELOG.md index 642a4d54ef62..dc9bb315cb10 100644 --- a/prover/CHANGELOG.md +++ b/prover/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [16.1.0](https://github.com/matter-labs/zksync-era/compare/prover-v16.0.0...prover-v16.1.0) (2024-07-24) + + +### Features + +* **prover:** Make it possible to run prover out of GCP ([#2448](https://github.com/matter-labs/zksync-era/issues/2448)) ([c9da549](https://github.com/matter-labs/zksync-era/commit/c9da5497e2aa9d85f204ab7b74fefcfe941793ff)) +* remove leftovers after BWIP ([#2456](https://github.com/matter-labs/zksync-era/issues/2456)) ([990676c](https://github.com/matter-labs/zksync-era/commit/990676c5f84afd2ff8cd337f495c82e8d1f305a4)) + + +### Bug Fixes + +* **prover:** BWG optimizations ([#2469](https://github.com/matter-labs/zksync-era/issues/2469)) 
([d8851c8](https://github.com/matter-labs/zksync-era/commit/d8851c8af2cd4b595f4edb9c36c81e2310835a77)) + ## [16.0.0](https://github.com/matter-labs/zksync-era/compare/prover-v15.1.0...prover-v16.0.0) (2024-07-11) From e02b411ef7a5d1ba0af4ed4854c289895c073f56 Mon Sep 17 00:00:00 2001 From: Roman Brodetski Date: Wed, 24 Jul 2024 13:16:22 +0100 Subject: [PATCH 20/52] docs(consensus): Update 09_decentralization.md (#2473) --- docs/guides/external-node/09_decentralization.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/guides/external-node/09_decentralization.md b/docs/guides/external-node/09_decentralization.md index 37cd4c502ef1..aa9598a825ca 100644 --- a/docs/guides/external-node/09_decentralization.md +++ b/docs/guides/external-node/09_decentralization.md @@ -17,6 +17,12 @@ On the gossipnet, the data integrity will be protected by the BFT (byzantine fau > current implementation it may take a couple of hours and gets faster the more nodes you add to the > `gossip_static_outbound` list (see below). We are working to remove this inconvenience. + +> [!NOTE] +> +> The minimal supported server version for this is [24.11.0](https://github.com/matter-labs/zksync-era/releases/tag/core-v24.11.0) + + ### Generating secrets Each participant node of the gossipnet has to have an identity (a public/secret key pair). When running your node for From 0f475c949a28c4602539b4d75ee79e605f44e2de Mon Sep 17 00:00:00 2001 From: Manuel Mauro Date: Wed, 24 Jul 2024 17:35:16 +0300 Subject: [PATCH 21/52] feat: add general config and secrets opts to snapshot creator (#2471) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ This PR adds `--config-path` and `--secrets-path` options to `/core/bin/snapshots_creator`. Allowing the binary to source configurations from files. ## Why ❔ Needed in order to run the command within `zk_toolbox`. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). 
- [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. --- Cargo.lock | 2 + core/bin/snapshots_creator/Cargo.toml | 2 + core/bin/snapshots_creator/src/main.rs | 51 +++++++++++++------ core/bin/snapshots_creator/src/tests.rs | 1 + core/lib/env_config/src/snapshots_creator.rs | 8 ++- .../external-node/09_decentralization.md | 5 +- 6 files changed, 48 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7319999316be..9840be8502ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6030,10 +6030,12 @@ dependencies = [ "anyhow", "futures 0.3.28", "rand 0.8.5", + "structopt", "tokio", "tracing", "vise", "zksync_config", + "zksync_core_leftovers", "zksync_dal", "zksync_env_config", "zksync_object_store", diff --git a/core/bin/snapshots_creator/Cargo.toml b/core/bin/snapshots_creator/Cargo.toml index 763d2374b8c2..33b1fa82a857 100644 --- a/core/bin/snapshots_creator/Cargo.toml +++ b/core/bin/snapshots_creator/Cargo.toml @@ -19,8 +19,10 @@ zksync_env_config.workspace = true zksync_types.workspace = true zksync_object_store.workspace = true zksync_vlog.workspace = true +zksync_core_leftovers.workspace = true anyhow.workspace = true +structopt.workspace = true tokio = { workspace = true, features = ["full"] } tracing.workspace = true futures.workspace = true diff --git a/core/bin/snapshots_creator/src/main.rs b/core/bin/snapshots_creator/src/main.rs index aee3919a4b0a..e07a879746ad 100644 --- a/core/bin/snapshots_creator/src/main.rs +++ b/core/bin/snapshots_creator/src/main.rs @@ -10,13 +10,11 @@ //! at a time). 
use anyhow::Context as _; +use structopt::StructOpt; use tokio::{sync::watch, task::JoinHandle}; -use zksync_config::{ - configs::{DatabaseSecrets, ObservabilityConfig, PrometheusConfig}, - SnapshotsCreatorConfig, -}; +use zksync_config::configs::PrometheusConfig; +use zksync_core_leftovers::temp_config_store::{load_database_secrets, load_general_config}; use zksync_dal::{ConnectionPool, Core}; -use zksync_env_config::{object_store::SnapshotsObjectStoreConfig, FromEnv}; use zksync_object_store::ObjectStoreFactory; use zksync_vlog::prometheus::PrometheusExporterConfig; @@ -28,9 +26,9 @@ mod metrics; mod tests; async fn maybe_enable_prometheus_metrics( + prometheus_config: Option, stop_receiver: watch::Receiver, ) -> anyhow::Result>>> { - let prometheus_config = PrometheusConfig::from_env().ok(); match prometheus_config.map(|c| (c.gateway_endpoint(), c.push_interval())) { Some((Some(gateway_endpoint), push_interval)) => { tracing::info!("Starting prometheus exporter with gateway {gateway_endpoint:?} and push_interval {push_interval:?}"); @@ -49,18 +47,36 @@ async fn maybe_enable_prometheus_metrics( /// Minimum number of storage log chunks to produce. const MIN_CHUNK_COUNT: u64 = 10; +#[derive(StructOpt)] +#[structopt(name = "ZKsync snapshot creator", author = "Matter Labs")] +struct Opt { + /// Path to the configuration file. + #[structopt(long)] + config_path: Option, + + /// Path to the secrets file. 
+ #[structopt(long)] + secrets_path: Option, +} + #[tokio::main] async fn main() -> anyhow::Result<()> { let (stop_sender, stop_receiver) = watch::channel(false); - let observability_config = - ObservabilityConfig::from_env().context("ObservabilityConfig::from_env()")?; + let opt = Opt::from_args(); + let general_config = load_general_config(opt.config_path).context("general config")?; + let database_secrets = load_database_secrets(opt.secrets_path).context("database secrets")?; + + let observability_config = general_config + .observability + .context("observability config")?; let log_format: zksync_vlog::LogFormat = observability_config .log_format .parse() .context("Invalid log format")?; - let prometheus_exporter_task = maybe_enable_prometheus_metrics(stop_receiver).await?; + let prometheus_exporter_task = + maybe_enable_prometheus_metrics(general_config.prometheus_config, stop_receiver).await?; let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); if let Some(sentry_url) = observability_config.sentry_url { builder = builder @@ -71,16 +87,19 @@ async fn main() -> anyhow::Result<()> { let _guard = builder.build(); tracing::info!("Starting snapshots creator"); - let object_store_config = - SnapshotsObjectStoreConfig::from_env().context("SnapshotsObjectStoreConfig::from_env()")?; - let blob_store = ObjectStoreFactory::new(object_store_config.0) + let creator_config = general_config + .snapshot_creator + .context("snapshot creator config")?; + + let object_store_config = creator_config + .clone() + .object_store + .context("snapshot creator object storage config")?; + + let blob_store = ObjectStoreFactory::new(object_store_config) .create_store() .await?; - let database_secrets = DatabaseSecrets::from_env().context("DatabaseSecrets")?; - let creator_config = - SnapshotsCreatorConfig::from_env().context("SnapshotsCreatorConfig::from_env")?; - let replica_pool = ConnectionPool::::builder( database_secrets.replica_url()?, 
creator_config.concurrent_queries_count, diff --git a/core/bin/snapshots_creator/src/tests.rs b/core/bin/snapshots_creator/src/tests.rs index 1c26f1081598..89a3807422be 100644 --- a/core/bin/snapshots_creator/src/tests.rs +++ b/core/bin/snapshots_creator/src/tests.rs @@ -10,6 +10,7 @@ use std::{ }; use rand::{thread_rng, Rng}; +use zksync_config::SnapshotsCreatorConfig; use zksync_dal::{Connection, CoreDal}; use zksync_object_store::{MockObjectStore, ObjectStore}; use zksync_types::{ diff --git a/core/lib/env_config/src/snapshots_creator.rs b/core/lib/env_config/src/snapshots_creator.rs index 6ed80e3780ce..80e1f5ec0b00 100644 --- a/core/lib/env_config/src/snapshots_creator.rs +++ b/core/lib/env_config/src/snapshots_creator.rs @@ -1,9 +1,13 @@ use zksync_config::SnapshotsCreatorConfig; -use crate::{envy_load, FromEnv}; +use crate::{envy_load, object_store::SnapshotsObjectStoreConfig, FromEnv}; impl FromEnv for SnapshotsCreatorConfig { fn from_env() -> anyhow::Result { - envy_load("snapshots_creator", "SNAPSHOTS_CREATOR_") + let mut snapshot_creator: SnapshotsCreatorConfig = + envy_load("snapshots_creator", "SNAPSHOTS_CREATOR_")?; + + snapshot_creator.object_store = SnapshotsObjectStoreConfig::from_env().map(|a| a.0).ok(); + Ok(snapshot_creator) } } diff --git a/docs/guides/external-node/09_decentralization.md b/docs/guides/external-node/09_decentralization.md index aa9598a825ca..fa780ba9ff55 100644 --- a/docs/guides/external-node/09_decentralization.md +++ b/docs/guides/external-node/09_decentralization.md @@ -17,11 +17,10 @@ On the gossipnet, the data integrity will be protected by the BFT (byzantine fau > current implementation it may take a couple of hours and gets faster the more nodes you add to the > `gossip_static_outbound` list (see below). We are working to remove this inconvenience. 
- > [!NOTE] > -> The minimal supported server version for this is [24.11.0](https://github.com/matter-labs/zksync-era/releases/tag/core-v24.11.0) - +> The minimal supported server version for this is +> [24.11.0](https://github.com/matter-labs/zksync-era/releases/tag/core-v24.11.0) ### Generating secrets From f1cbb74b863b6e0bcfa74ad780beef29844bac6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mat=C3=ADas=20Ignacio=20Gonz=C3=A1lez?= Date: Wed, 24 Jul 2024 17:37:08 +0200 Subject: [PATCH 22/52] feat(zk_toolbox): Add check for zksync repo path (#2447) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add check for zksync repo path --- .../src/commands/ecosystem/args/create.rs | 61 +++++++++++++++++-- .../src/commands/ecosystem/create.rs | 2 +- .../crates/zk_inception/src/messages.rs | 7 +++ 3 files changed, 63 insertions(+), 7 deletions(-) diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs index f005a98f6b64..d3d5fe129678 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs @@ -1,20 +1,23 @@ -use std::path::PathBuf; +use std::path::{Path, PathBuf}; +use anyhow::bail; use clap::Parser; -use common::{Prompt, PromptConfirm, PromptSelect}; +use common::{cmd::Cmd, logger, Prompt, PromptConfirm, PromptSelect}; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use strum::IntoEnumIterator; use strum_macros::EnumIter; use types::{L1Network, WalletCreation}; +use xshell::{cmd, Shell}; use crate::{ commands::chain::{args::create::ChainCreateArgs, ChainCreateArgsFinal}, messages::{ + msg_path_to_zksync_does_not_exist_err, MSG_CONFIRM_STILL_USE_FOLDER, MSG_ECOSYSTEM_NAME_PROMPT, MSG_L1_NETWORK_HELP, MSG_L1_NETWORK_PROMPT, MSG_LINK_TO_CODE_HELP, MSG_LINK_TO_CODE_PROMPT, MSG_LINK_TO_CODE_SELECTION_CLONE, - 
MSG_LINK_TO_CODE_SELECTION_PATH, MSG_REPOSITORY_ORIGIN_PROMPT, MSG_START_CONTAINERS_HELP, - MSG_START_CONTAINERS_PROMPT, + MSG_LINK_TO_CODE_SELECTION_PATH, MSG_NOT_MAIN_REPO_OR_FORK_ERR, + MSG_REPOSITORY_ORIGIN_PROMPT, MSG_START_CONTAINERS_HELP, MSG_START_CONTAINERS_PROMPT, }, }; @@ -34,7 +37,7 @@ pub struct EcosystemCreateArgs { } impl EcosystemCreateArgs { - pub fn fill_values_with_prompt(mut self) -> EcosystemCreateArgsFinal { + pub fn fill_values_with_prompt(mut self, shell: &Shell) -> EcosystemCreateArgsFinal { let mut ecosystem_name = self .ecosystem_name .unwrap_or_else(|| Prompt::new(MSG_ECOSYSTEM_NAME_PROMPT).ask()); @@ -45,7 +48,16 @@ impl EcosystemCreateArgs { PromptSelect::new(MSG_REPOSITORY_ORIGIN_PROMPT, LinkToCodeSelection::iter()).ask(); match link_to_code_selection { LinkToCodeSelection::Clone => "".to_string(), - LinkToCodeSelection::Path => Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(), + LinkToCodeSelection::Path => { + let mut path: String = Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(); + if let Err(err) = check_link_to_code(shell, &path) { + logger::warn(err); + if !PromptConfirm::new(MSG_CONFIRM_STILL_USE_FOLDER).ask() { + path = pick_new_link_to_code(shell); + } + } + path + } } }); @@ -105,3 +117,40 @@ impl std::fmt::Display for LinkToCodeSelection { } } } + +fn check_link_to_code(shell: &Shell, path: &str) -> anyhow::Result<()> { + let path = Path::new(path); + if !shell.path_exists(path) { + bail!(msg_path_to_zksync_does_not_exist_err( + path.to_str().unwrap() + )); + } + + let _guard = shell.push_dir(path); + let out = String::from_utf8( + Cmd::new(cmd!(shell, "git remote -v")) + .run_with_output()? 
+ .stdout, + )?; + + if !out.contains("matter-labs/zksync-era") { + bail!(MSG_NOT_MAIN_REPO_OR_FORK_ERR); + } + + Ok(()) +} + +fn pick_new_link_to_code(shell: &Shell) -> String { + let link_to_code: String = Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(); + match check_link_to_code(shell, &link_to_code) { + Ok(_) => link_to_code, + Err(err) => { + logger::warn(err); + if !PromptConfirm::new(MSG_CONFIRM_STILL_USE_FOLDER).ask() { + pick_new_link_to_code(shell) + } else { + link_to_code + } + } + } +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs index b7fdfee855f6..30dffad035ab 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs @@ -39,7 +39,7 @@ pub fn run(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { } fn create(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { - let args = args.fill_values_with_prompt(); + let args = args.fill_values_with_prompt(shell); logger::note(MSG_SELECTED_CONFIG, logger::object_to_string(&args)); logger::info(MSG_CREATING_ECOSYSTEM); diff --git a/zk_toolbox/crates/zk_inception/src/messages.rs b/zk_toolbox/crates/zk_inception/src/messages.rs index af40b48e5795..a33143b4bd66 100644 --- a/zk_toolbox/crates/zk_inception/src/messages.rs +++ b/zk_toolbox/crates/zk_inception/src/messages.rs @@ -36,6 +36,13 @@ pub(super) const MSG_ECOSYSTEM_ALREADY_EXISTS_ERR: &str = "Ecosystem already exi pub(super) const MSG_ECOSYSTEM_CONFIG_INVALID_ERR: &str = "Invalid ecosystem configuration"; pub(super) const MSG_LINK_TO_CODE_SELECTION_CLONE: &str = "Clone for me (recommended)"; pub(super) const MSG_LINK_TO_CODE_SELECTION_PATH: &str = "I have the code already"; +pub(super) const MSG_NOT_MAIN_REPO_OR_FORK_ERR: &str = + "It's not a zkSync Era main repository or fork"; +pub(super) const MSG_CONFIRM_STILL_USE_FOLDER: &str = "Do you still want 
to use this folder?"; + +pub(super) fn msg_path_to_zksync_does_not_exist_err(path: &str) -> String { + format!("Path to zkSync Era repo does not exist: {path:?}") +} /// Ecosystem and chain init related messages pub(super) const MSG_L1_RPC_URL_HELP: &str = "L1 RPC URL"; From ff6b10c4a994cf70297a034202bcb55152748cba Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Wed, 24 Jul 2024 17:25:04 +0100 Subject: [PATCH 23/52] feat: Update to consensus 0.1.0-rc.4 (BFT-486) (#2475) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Updates the `era-consensus` dependency to 0.1.0-rc.4, which brings the following changes: * `BatchStoreState::last` no longer contains the entire `SyncBatch`, just the number of the latest batch, which should reduce the unintended gossip data volume * Makes new batch gossip related metrics available to Prometheus (BFT-486) ## Why ❔ Adding attesters to the genesis is a breaking change, so we would like to cut a new release to external node operators. These changes make the gossip less resource intensive as well as make it more observable. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. 
--------- Co-authored-by: Bruno França --- Cargo.lock | 40 +++++++++---------- Cargo.toml | 22 +++++----- core/node/consensus/src/storage/connection.rs | 14 ------- core/node/consensus/src/storage/store.rs | 40 ++++++++++--------- core/node/consensus/src/tests.rs | 14 +++++-- prover/Cargo.lock | 28 ++++++------- zk_toolbox/Cargo.lock | 16 ++++---- 7 files changed, 85 insertions(+), 89 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9840be8502ec..3b43a01fc5a0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8110,9 +8110,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1af85d9a31c534a29877c88474cf5f1c46ad25f7c48efff61ea40f4aa83c5459" +checksum = "50302b77192891256d180ff2551dc0c3bc4144958b49e9a16c50a0dc218958ba" dependencies = [ "anyhow", "once_cell", @@ -8144,9 +8144,9 @@ dependencies = [ [[package]] name = "zksync_consensus_bft" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddbee11ed4fafe461092fb73d3879325f08243fe50351baab6b5f593fee88f06" +checksum = "2325c7486a8280db1c26c10020350bead6eecb3de03f8bbfd878060f000cdce7" dependencies = [ "anyhow", "async-trait", @@ -8166,9 +8166,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b3867f9b4778616d87f157d1049e47290a3bca5ec9db208164f8902524ae92c" +checksum = "f5cb8ed0d59593f6147085b77142628e459ba673aa4d48fce064d5b96e31eb36" dependencies = [ "anyhow", "blst", @@ -8190,9 +8190,9 @@ dependencies = [ [[package]] name = "zksync_consensus_executor" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7d50aa34616a9c1f4cdc7c47aae2df61474e137e41125c9d5fbfc1e5a1faaa" +checksum = 
"247b70ec255781b3b740acb744236e771a192922ffbaa52c462b84c4ea67609f" dependencies = [ "anyhow", "rand 0.8.5", @@ -8210,9 +8210,9 @@ dependencies = [ [[package]] name = "zksync_consensus_network" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced7deafe460c74321edf79486980f9f75da121a1e52e5805392946dabafdf82" +checksum = "f10626b79885a9b096cd19ee83d85ef9b0554f061a9db6946f2b7c9d1b2f49ea" dependencies = [ "anyhow", "async-trait", @@ -8245,9 +8245,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55dacdf1bad5d9efe7dd9db200421afa0c3bf5cfc7fdce4a64720a5dd0685807" +checksum = "1ffe3e47d99eb943eb94f2f5c9d929b1192bf3e8d1434de0fa6f0090f9c1197e" dependencies = [ "anyhow", "bit-vec", @@ -8267,9 +8267,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f796020459775391094b9dcd133f01b5127059fe167cf412b2d1aed23fe0e52f" +checksum = "b9ae9a0ec64ce9c0af346e50cc87dc257c30259101ce9675b408cb883e096087" dependencies = [ "anyhow", "async-trait", @@ -8287,9 +8287,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "587de103f745d0b88b49a9fb98cb002c4b7ce6ad042e17845091dce67b8aa984" +checksum = "24dc6135abeefa80f617eb2903fe43d137d362bf673f0651b4894b17069d1fb1" dependencies = [ "anyhow", "rand 0.8.5", @@ -9225,9 +9225,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86baa84d8bbbbeea269c0f99aca88364e4fd2a08e6ae7051ff87317132b4ef9" +checksum = 
"b1e7c7820f290db565a1b4ff73aa1175cd7d31498fca8d859eb5aceebd33468c" dependencies = [ "anyhow", "bit-vec", @@ -9246,9 +9246,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f221ce83f4622c3d8732d09f4461d116d7b10f1cc9d1d1cd014c1fa836c168e6" +checksum = "f6cafeec1150ae91f1a37c8f0dce6b71b92b93e0c4153d32b4c37e2fd71bce2f" dependencies = [ "anyhow", "heck 0.5.0", diff --git a/Cargo.toml b/Cargo.toml index b0f98f33e3df..7b6ac30be8f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -79,7 +79,7 @@ members = [ "core/tests/vm-benchmark/harness", # Parts of prover workspace that are needed for Core workspace - "prover/crates/lib/prover_dal" + "prover/crates/lib/prover_dal", ] resolver = "2" @@ -209,16 +209,16 @@ zk_evm_1_4_1 = { package = "zk_evm", version = "0.141.0" } zk_evm_1_5_0 = { package = "zk_evm", version = "0.150.0" } # Consensus dependencies. -zksync_concurrency = "=0.1.0-rc.2" -zksync_consensus_bft = "=0.1.0-rc.2" -zksync_consensus_crypto = "=0.1.0-rc.2" -zksync_consensus_executor = "=0.1.0-rc.2" -zksync_consensus_network = "=0.1.0-rc.2" -zksync_consensus_roles = "=0.1.0-rc.2" -zksync_consensus_storage = "=0.1.0-rc.2" -zksync_consensus_utils = "=0.1.0-rc.2" -zksync_protobuf = "=0.1.0-rc.2" -zksync_protobuf_build = "=0.1.0-rc.2" +zksync_concurrency = "=0.1.0-rc.4" +zksync_consensus_bft = "=0.1.0-rc.4" +zksync_consensus_crypto = "=0.1.0-rc.4" +zksync_consensus_executor = "=0.1.0-rc.4" +zksync_consensus_network = "=0.1.0-rc.4" +zksync_consensus_roles = "=0.1.0-rc.4" +zksync_consensus_storage = "=0.1.0-rc.4" +zksync_consensus_utils = "=0.1.0-rc.4" +zksync_protobuf = "=0.1.0-rc.4" +zksync_protobuf_build = "=0.1.0-rc.4" # "Local" dependencies zksync_multivm = { version = "0.1.0", path = "core/lib/multivm" } diff --git a/core/node/consensus/src/storage/connection.rs b/core/node/consensus/src/storage/connection.rs index 
5d76934d7005..7bff2c4bcf0e 100644 --- a/core/node/consensus/src/storage/connection.rs +++ b/core/node/consensus/src/storage/connection.rs @@ -411,20 +411,6 @@ impl<'a> Connection<'a> { .await .context("get_last_batch_number()")?; - let last = if let Some(last) = last { - // For now it would be unexpected if we couldn't retrieve the payloads - // for the `last` batch number, as an L1 batch is only created if we - // have all the L2 miniblocks for it. - Some( - self.get_batch(ctx, last) - .await - .context("get_batch()")? - .context("last batch not available")?, - ) - } else { - None - }; - Ok(BatchStoreState { first: first .map(|n| attester::BatchNumber(n.0 as u64)) diff --git a/core/node/consensus/src/storage/store.rs b/core/node/consensus/src/storage/store.rs index ad8f4948831b..b1dc3e0b60c2 100644 --- a/core/node/consensus/src/storage/store.rs +++ b/core/node/consensus/src/storage/store.rs @@ -446,10 +446,7 @@ impl storage::PersistentBatchStore for Store { self.batches_persisted.clone() } - /// Get the earliest L1 batch number which has to be (re)signed by a node. - /// - /// Ideally we would make this decision by looking up the last batch submitted to L1, - /// and so it might require a quorum of attesters to sign a certificate for it. + /// Get the earliest L1 batch number which has to be signed by attesters. async fn earliest_batch_number_to_sign( &self, ctx: &ctx::Ctx, @@ -457,31 +454,36 @@ impl storage::PersistentBatchStore for Store { // This is the rough roadmap of how this logic will evolve: // 1. Make best effort at gossiping and collecting votes; the `BatchVotes` in consensus only considers the last vote per attesters. // Still, we can re-sign more than the last batch, anticipating step 2. - // 2. Change `BatchVotes` to handle multiple pending batch numbers, anticipating that batch intervals might decrease dramatically. - // 3. Ask the Main Node what is the earliest batch number that it still expects votes for (ie. what is the last submission + 1). 
- // 4. Look at L1 to figure out what is the last submssion, and sign after that. + // 2. Ask the Main Node what is the earliest batch number that it still expects votes for (ie. what is the last submission + 1). + // 3. Change `BatchVotes` to handle multiple pending batch numbers, anticipating that batch intervals might decrease dramatically. + // 4. Once QC is required to submit to L1, Look at L1 to figure out what is the last submission, and sign after that. - // Originally this method returned all unsigned batch numbers by doing a DAL query, but we decided it shoudl be okay and cheap + // Originally this method returned all unsigned batch numbers by doing a DAL query, but we decided it should be okay and cheap // to resend signatures for already signed batches, and we don't have to worry about skipping them. Because of that, we also // didn't think it makes sense to query the database for the earliest unsigned batch *after* the submission, because we might // as well just re-sign everything. Until we have a way to argue about the "last submission" we just re-sign the last 10 to // try to produce as many QCs as the voting register allows, within reason. - let Some(last_batch_number) = self.last_batch(ctx).await? else { - return Ok(None); - }; - Ok(Some(attester::BatchNumber( - last_batch_number.0.saturating_sub(10), - ))) - } + // The latest decision is not to store batches with gaps between in the database *of the main node*. + // Once we have an API to serve to external nodes the earliest number the main node wants them to sign, + // we can get rid of this method: on the main node we can sign from what `last_batch_qc` returns, and + // while external nodes we can go from whatever the API returned. - /// Get the highest L1 batch number from storage. - async fn last_batch(&self, ctx: &ctx::Ctx) -> ctx::Result> { - self.conn(ctx) + const NUM_BATCHES_TO_SIGN: u64 = 10; + + let Some(last_batch_number) = self + .conn(ctx) .await? 
.get_last_batch_number(ctx) .await - .wrap("get_last_batch_number") + .wrap("get_last_batch_number")? + else { + return Ok(None); + }; + + Ok(Some(attester::BatchNumber( + last_batch_number.0.saturating_sub(NUM_BATCHES_TO_SIGN), + ))) } /// Get the L1 batch QC from storage with the highest number. diff --git a/core/node/consensus/src/tests.rs b/core/node/consensus/src/tests.rs index 5506ec6ee8f4..7d269376b65c 100644 --- a/core/node/consensus/src/tests.rs +++ b/core/node/consensus/src/tests.rs @@ -120,17 +120,25 @@ async fn test_connection_get_batch(from_snapshot: bool, version: ProtocolVersion let batches = conn.batches_range(ctx).await?; let last = batches.last.expect("last is set"); let (min, max) = conn - .get_l2_block_range_of_l1_batch(ctx, last.number) + .get_l2_block_range_of_l1_batch(ctx, last) .await? .unwrap(); + let last_batch = conn + .get_batch(ctx, last) + .await? + .expect("last batch can be retrieved"); + assert_eq!( - last.payloads.len(), + last_batch.payloads.len(), (max.0 - min.0) as usize, "all block payloads present" ); - let first_payload = last.payloads.first().expect("last batch has payloads"); + let first_payload = last_batch + .payloads + .first() + .expect("last batch has payloads"); let want_payload = conn.payload(ctx, min).await?.expect("payload is in the DB"); let want_payload = want_payload.encode(); diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 376b464babe1..6be3ab0db106 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -7728,9 +7728,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1af85d9a31c534a29877c88474cf5f1c46ad25f7c48efff61ea40f4aa83c5459" +checksum = "50302b77192891256d180ff2551dc0c3bc4144958b49e9a16c50a0dc218958ba" dependencies = [ "anyhow", "once_cell", @@ -7762,9 +7762,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.1.0-rc.2" +version = 
"0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b3867f9b4778616d87f157d1049e47290a3bca5ec9db208164f8902524ae92c" +checksum = "f5cb8ed0d59593f6147085b77142628e459ba673aa4d48fce064d5b96e31eb36" dependencies = [ "anyhow", "blst", @@ -7786,9 +7786,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55dacdf1bad5d9efe7dd9db200421afa0c3bf5cfc7fdce4a64720a5dd0685807" +checksum = "1ffe3e47d99eb943eb94f2f5c9d929b1192bf3e8d1434de0fa6f0090f9c1197e" dependencies = [ "anyhow", "bit-vec", @@ -7808,9 +7808,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f796020459775391094b9dcd133f01b5127059fe167cf412b2d1aed23fe0e52f" +checksum = "b9ae9a0ec64ce9c0af346e50cc87dc257c30259101ce9675b408cb883e096087" dependencies = [ "anyhow", "async-trait", @@ -7828,9 +7828,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "587de103f745d0b88b49a9fb98cb002c4b7ce6ad042e17845091dce67b8aa984" +checksum = "24dc6135abeefa80f617eb2903fe43d137d362bf673f0651b4894b17069d1fb1" dependencies = [ "anyhow", "rand 0.8.5", @@ -8137,9 +8137,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86baa84d8bbbbeea269c0f99aca88364e4fd2a08e6ae7051ff87317132b4ef9" +checksum = "b1e7c7820f290db565a1b4ff73aa1175cd7d31498fca8d859eb5aceebd33468c" dependencies = [ "anyhow", "bit-vec", @@ -8158,9 +8158,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f221ce83f4622c3d8732d09f4461d116d7b10f1cc9d1d1cd014c1fa836c168e6" +checksum = "f6cafeec1150ae91f1a37c8f0dce6b71b92b93e0c4153d32b4c37e2fd71bce2f" dependencies = [ "anyhow", "heck 0.5.0", diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index 769b2af8e44c..9d738fdf7231 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -6360,9 +6360,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1af85d9a31c534a29877c88474cf5f1c46ad25f7c48efff61ea40f4aa83c5459" +checksum = "50302b77192891256d180ff2551dc0c3bc4144958b49e9a16c50a0dc218958ba" dependencies = [ "anyhow", "once_cell", @@ -6394,9 +6394,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "587de103f745d0b88b49a9fb98cb002c4b7ce6ad042e17845091dce67b8aa984" +checksum = "24dc6135abeefa80f617eb2903fe43d137d362bf673f0651b4894b17069d1fb1" dependencies = [ "anyhow", "rand", @@ -6445,9 +6445,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86baa84d8bbbbeea269c0f99aca88364e4fd2a08e6ae7051ff87317132b4ef9" +checksum = "b1e7c7820f290db565a1b4ff73aa1175cd7d31498fca8d859eb5aceebd33468c" dependencies = [ "anyhow", "bit-vec", @@ -6466,9 +6466,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.2" +version = "0.1.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f221ce83f4622c3d8732d09f4461d116d7b10f1cc9d1d1cd014c1fa836c168e6" +checksum = "f6cafeec1150ae91f1a37c8f0dce6b71b92b93e0c4153d32b4c37e2fd71bce2f" dependencies = [ "anyhow", "heck 0.5.0", From 
1c443e5ecfd000279830262a4a35cbc83a9aacec Mon Sep 17 00:00:00 2001 From: Joonatan Saarhelo Date: Wed, 24 Jul 2024 18:43:58 +0100 Subject: [PATCH 24/52] perf: writing tx to bootloader memory is no longer quadratic (#2479) It used to grow proportionally to the number of transactions in the L2 block squared. --- .../vm_latest/bootloader_state/l2_block.rs | 8 +------ .../vm_latest/bootloader_state/state.rs | 2 +- .../vm_latest/bootloader_state/utils.rs | 22 +++++++++++++------ 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs index bb74f9628946..103c5d16540e 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs @@ -13,7 +13,7 @@ use crate::{ const EMPTY_TXS_ROLLING_HASH: H256 = H256::zero(); -#[derive(Debug, Clone)] +#[derive(Debug)] pub(crate) struct BootloaderL2Block { pub(crate) number: u32, pub(crate) timestamp: u64, @@ -56,12 +56,6 @@ impl BootloaderL2Block { self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) } - pub(crate) fn interim_version(&self) -> BootloaderL2Block { - let mut interim = self.clone(); - interim.max_virtual_blocks_to_create = 0; - interim - } - pub(crate) fn make_snapshot(&self) -> L2BlockSnapshot { L2BlockSnapshot { txs_rolling_hash: self.txs_rolling_hash, diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs index c43d82b0d281..a3f59937d57e 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs @@ -29,7 +29,7 @@ use crate::{ /// Serves two purposes: /// - Tracks where next tx should be pushed to in the bootloader memory. /// - Tracks which transaction should be executed next. 
-#[derive(Debug, Clone)] +#[derive(Debug)] pub struct BootloaderState { /// ID of the next transaction to be executed. /// See the structure doc-comment for a better explanation of purpose. diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs index 346c1bde5368..db4c834fbc77 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs @@ -67,12 +67,7 @@ pub(super) fn apply_tx_to_memory( .zip(bootloader_tx.encoded.clone()), ); - let bootloader_l2_block = if start_new_l2_block { - bootloader_l2_block.clone() - } else { - bootloader_l2_block.interim_version() - }; - apply_l2_block(memory, &bootloader_l2_block, tx_index); + apply_l2_block_inner(memory, bootloader_l2_block, tx_index, start_new_l2_block); // Note, +1 is moving for pointer let compressed_bytecodes_offset = COMPRESSED_BYTECODES_OFFSET + 1 + compressed_bytecodes_size; @@ -93,6 +88,15 @@ pub(crate) fn apply_l2_block( memory: &mut BootloaderMemory, bootloader_l2_block: &BootloaderL2Block, txs_index: usize, +) { + apply_l2_block_inner(memory, bootloader_l2_block, txs_index, true) +} + +fn apply_l2_block_inner( + memory: &mut BootloaderMemory, + bootloader_l2_block: &BootloaderL2Block, + txs_index: usize, + start_new_l2_block: bool, ) { // Since L2 block information start from the `TX_OPERATOR_L2_BLOCK_INFO_OFFSET` and each // L2 block info takes `TX_OPERATOR_SLOTS_PER_L2_BLOCK_INFO` slots, the position where the L2 block info @@ -110,7 +114,11 @@ pub(crate) fn apply_l2_block( ), ( block_position + 3, - bootloader_l2_block.max_virtual_blocks_to_create.into(), + if start_new_l2_block { + bootloader_l2_block.max_virtual_blocks_to_create.into() + } else { + U256::zero() + }, ), ]) } From dea6969d1b67c54a0985278de68a8d50f1084dc1 Mon Sep 17 00:00:00 2001 From: pompon0 Date: Thu, 25 Jul 2024 10:00:08 +0200 Subject: [PATCH 25/52] fix: 
consensus secrets generator (#2484) the output format didn't match the expected consensus_secrets.yaml format. --- core/bin/external_node/src/config/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index 120df5f139fa..5674396652f4 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -1136,11 +1136,11 @@ pub fn generate_consensus_secrets() { let attester_key = roles::attester::SecretKey::generate(); let node_key = roles::node::SecretKey::generate(); println!("# {}", validator_key.public().encode()); - println!("- validator_key: {}", validator_key.encode()); + println!("validator_key: {}", validator_key.encode()); println!("# {}", attester_key.public().encode()); - println!("- attester_key: {}", attester_key.encode()); + println!("attester_key: {}", attester_key.encode()); println!("# {}", node_key.public().encode()); - println!("- node_key: {}", node_key.encode()); + println!("node_key: {}", node_key.encode()); } pub(crate) fn read_consensus_secrets() -> anyhow::Result> { From 12b470fb40053caffb1b1b82c111b033c409bd23 Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 15:00:29 +0300 Subject: [PATCH 26/52] ci: add docker login for ci steps (#2491) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add docker login ## Why ❔ Fix rate limit for docker pull ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- .github/workflows/ci-common-reusable.yml | 8 ++++++++ .github/workflows/ci-core-lint-reusable.yml | 9 ++++++++- .github/workflows/ci-core-reusable.yml | 8 ++++++++ .github/workflows/ci-prover-reusable.yml | 9 ++++++++- .github/workflows/ci-zk-toolbox-reusable.yml | 8 ++++++++ .github/workflows/ci.yml | 15 +++++++++++++++ 6 files changed, 55 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-common-reusable.yml b/.github/workflows/ci-common-reusable.yml index 191c69180631..657fbd682b7c 100644 --- a/.github/workflows/ci-common-reusable.yml +++ b/.github/workflows/ci-common-reusable.yml @@ -1,6 +1,13 @@ name: Workflow template for CI jobs to be ran on both Prover and Core Components on: workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true jobs: build: @@ -18,6 +25,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci-core-lint-reusable.yml b/.github/workflows/ci-core-lint-reusable.yml index 4fd8f76a5383..ac7aefc7bfd9 100644 --- a/.github/workflows/ci-core-lint-reusable.yml +++ b/.github/workflows/ci-core-lint-reusable.yml @@ -1,7 +1,13 @@ name: Workflow template for Core Linting CI jobs on: workflow_call: - + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true jobs: code_lint: @@ -17,6 +23,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index 93aa1bb1658b..0558be38fbda 100644 --- 
a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -1,6 +1,13 @@ name: Workflow template for CI jobs for Core Components on: workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true inputs: compilers: description: "JSON of required compilers and their versions" @@ -26,6 +33,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} # TODO: Remove when we after upgrade of hardhat-plugins - name: pre-download compilers diff --git a/.github/workflows/ci-prover-reusable.yml b/.github/workflows/ci-prover-reusable.yml index 6a8813a0a343..94fb8906ba9e 100644 --- a/.github/workflows/ci-prover-reusable.yml +++ b/.github/workflows/ci-prover-reusable.yml @@ -1,7 +1,13 @@ name: Workflow template for CI jobs for Prover Components on: workflow_call: - + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true jobs: lint: runs-on: [matterlabs-ci-runner] @@ -18,6 +24,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci-zk-toolbox-reusable.yml b/.github/workflows/ci-zk-toolbox-reusable.yml index 87bd1729db91..5550dcab0349 100644 --- a/.github/workflows/ci-zk-toolbox-reusable.yml +++ b/.github/workflows/ci-zk-toolbox-reusable.yml @@ -1,6 +1,13 @@ name: Workflow template for CI jobs for Core Components on: workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true env: CLICOLOR: 1 @@ -24,6 +31,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> 
$GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0155e362f15b..afaf5ccaa615 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -83,24 +83,36 @@ jobs: needs: changed_files if: contains(github.ref_name, 'release-please--branches') uses: ./.github/workflows/ci-core-lint-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} ci-for-core: name: CI for Core Components needs: changed_files if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} uses: ./.github/workflows/ci-core-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} ci-for-prover: needs: changed_files if: ${{ (needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} name: CI for Prover Components uses: ./.github/workflows/ci-prover-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} ci-for-zk-toolbox: needs: changed_files if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.zk_toolbox == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} name: CI for zk_toolbox uses: ./.github/workflows/ci-zk-toolbox-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} ci-for-docs: needs: changed_files @@ -114,6 +126,9 @@ jobs: if: ${{ (needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.core == 'true' || 
needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} name: CI for Common Components (prover or core) uses: ./.github/workflows/ci-common-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} build-core-images: name: Build core images From d7343e7776a8be441df3cce478987612916cc134 Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 15:18:54 +0300 Subject: [PATCH 27/52] ci: add docker login for ci steps (#2495) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add missed docker login and fixes in ci lint docker usage ## Why ❔ Failed workflow on rate limits ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. 
--- .github/workflows/build-contract-verifier-template.yml | 1 + .github/workflows/build-core-template.yml | 1 + .github/workflows/build-prover-template.yml | 1 + .github/workflows/ci-core-reusable.yml | 3 +++ .github/workflows/ci-docs-reusable.yml | 9 ++++++++- .github/workflows/ci.yml | 3 +++ 6 files changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-contract-verifier-template.yml b/.github/workflows/build-contract-verifier-template.yml index 2b24801d065f..fff4474cdef4 100644 --- a/.github/workflows/build-contract-verifier-template.yml +++ b/.github/workflows/build-contract-verifier-template.yml @@ -49,6 +49,7 @@ jobs: echo $(pwd)/bin >> $GITHUB_PATH echo CI=1 >> .env echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Download contracts run: | diff --git a/.github/workflows/build-core-template.yml b/.github/workflows/build-core-template.yml index 4ead6cb746dd..5f8418dee9fe 100644 --- a/.github/workflows/build-core-template.yml +++ b/.github/workflows/build-core-template.yml @@ -58,6 +58,7 @@ jobs: echo $(pwd)/bin >> $GITHUB_PATH echo CI=1 >> .env echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Download contracts run: | diff --git a/.github/workflows/build-prover-template.yml b/.github/workflows/build-prover-template.yml index ba76740ee2df..2673d2d44820 100644 --- a/.github/workflows/build-prover-template.yml +++ b/.github/workflows/build-prover-template.yml @@ -70,6 +70,7 @@ jobs: echo $(pwd)/bin >> $GITHUB_PATH echo CI=1 >> .env echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: start-services run: | diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index 0558be38fbda..ae047c6d78d5 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -19,6 +19,9 @@ jobs: 
lint: name: lint uses: ./.github/workflows/ci-core-lint-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} unit-tests: runs-on: [matterlabs-ci-runner] diff --git a/.github/workflows/ci-docs-reusable.yml b/.github/workflows/ci-docs-reusable.yml index 82ef312c9832..446b7b32d3e7 100644 --- a/.github/workflows/ci-docs-reusable.yml +++ b/.github/workflows/ci-docs-reusable.yml @@ -1,7 +1,13 @@ name: Workflow template for CI jobs against docs on: workflow_call: - + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true jobs: lint: runs-on: [matterlabs-ci-runner] @@ -16,6 +22,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index afaf5ccaa615..f8ef751b317f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -119,6 +119,9 @@ jobs: if: needs.changed_files.outputs.docs == 'true' name: CI for Docs uses: ./.github/workflows/ci-docs-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} # What needs to be ran for both core and prover ci-for-common: From ddfcececb14d5ff7f1889bf944c47d7a420b8322 Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 15:25:19 +0300 Subject: [PATCH 28/52] ci: add docker login for ci steps (#2496) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Fix ci lint job ## Why ❔ Missed docker secrets to auth ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. 
- [x] Code has been formatted via `zk fmt` and `zk lint`. --- .github/workflows/ci-zk-toolbox-reusable.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci-zk-toolbox-reusable.yml b/.github/workflows/ci-zk-toolbox-reusable.yml index 5550dcab0349..5b5173f6c03b 100644 --- a/.github/workflows/ci-zk-toolbox-reusable.yml +++ b/.github/workflows/ci-zk-toolbox-reusable.yml @@ -16,6 +16,9 @@ jobs: lint: name: lint uses: ./.github/workflows/ci-core-lint-reusable.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} build: runs-on: [matterlabs-ci-runner] From 9367b432b8f57c5835aa14fa35c0f49e6fe92ceb Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 15:37:54 +0300 Subject: [PATCH 29/52] ci: add docker login for ci steps (#2497) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add missed docker login ## Why ❔ Broken CI/CD workflows ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- .github/workflows/ci-core-reusable.yml | 3 +++ .github/workflows/ci-prover-reusable.yml | 1 + .github/workflows/ci-zk-toolbox-reusable.yml | 2 +- .github/workflows/release-please-cargo-lock.yml | 3 ++- .github/workflows/vm-perf-to-prometheus.yml | 1 + 5 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index ae047c6d78d5..c0c816aa8e28 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -90,6 +90,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Loadtest configuration run: | @@ -162,6 +163,7 @@ jobs: echo IN_DOCKER=1 >> .env echo RUN_CONTRACT_VERIFICATION_TEST=true >> .env echo ZKSYNC_DEBUG_LOGS=true >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Download zksolc/solc and zkvyper/vyper run: | @@ -337,6 +339,7 @@ jobs: echo IN_DOCKER=1 >> .env echo RUN_CONTRACT_VERIFICATION_TEST=true >> .env echo ZKSYNC_DEBUG_LOGS=true >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci-prover-reusable.yml b/.github/workflows/ci-prover-reusable.yml index 94fb8906ba9e..993203eb854e 100644 --- a/.github/workflows/ci-prover-reusable.yml +++ b/.github/workflows/ci-prover-reusable.yml @@ -56,6 +56,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/ci-zk-toolbox-reusable.yml b/.github/workflows/ci-zk-toolbox-reusable.yml index 5b5173f6c03b..8c8434c6711a 100644 --- a/.github/workflows/ci-zk-toolbox-reusable.yml +++ b/.github/workflows/ci-zk-toolbox-reusable.yml 
@@ -83,7 +83,7 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env - + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Start services run: | diff --git a/.github/workflows/release-please-cargo-lock.yml b/.github/workflows/release-please-cargo-lock.yml index a602eaaf083a..c7972580cacb 100644 --- a/.github/workflows/release-please-cargo-lock.yml +++ b/.github/workflows/release-please-cargo-lock.yml @@ -32,7 +32,8 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env - + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + - name: Start services if: steps.condition.outputs.skip_steps != 'true' run: docker compose up -d zk diff --git a/.github/workflows/vm-perf-to-prometheus.yml b/.github/workflows/vm-perf-to-prometheus.yml index fce7ead2d696..40da982155c1 100644 --- a/.github/workflows/vm-perf-to-prometheus.yml +++ b/.github/workflows/vm-perf-to-prometheus.yml @@ -28,6 +28,7 @@ jobs: - name: init run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} run_retried docker compose pull zk docker compose up -d zk ci_run zk From f42b26027cf864a08b775fa2ed04adcddf5311a9 Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 15:57:17 +0300 Subject: [PATCH 30/52] ci: add docker login for ci steps (#2498) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add missed docker login ## Why ❔ Broken release ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- .github/workflows/build-local-node-docker.yml | 1 + .github/workflows/build-prover-template.yml | 3 +++ .github/workflows/vm-perf-comparison.yml | 1 + 3 files changed, 5 insertions(+) diff --git a/.github/workflows/build-local-node-docker.yml b/.github/workflows/build-local-node-docker.yml index e5e8fb69fb1d..bd3ecd12e627 100644 --- a/.github/workflows/build-local-node-docker.yml +++ b/.github/workflows/build-local-node-docker.yml @@ -29,6 +29,7 @@ jobs: echo $(pwd)/bin >> $GITHUB_PATH echo CI=1 >> .env echo IN_DOCKER=1 >> .env + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} # TODO: Remove after when we can upgrade hardhat-plugins - name: pre-download compiilers diff --git a/.github/workflows/build-prover-template.yml b/.github/workflows/build-prover-template.yml index 2673d2d44820..e488157463dd 100644 --- a/.github/workflows/build-prover-template.yml +++ b/.github/workflows/build-prover-template.yml @@ -186,6 +186,9 @@ jobs: component: - witness-vector-generator steps: + - name: Auth Docker hub registry + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/vm-perf-comparison.yml b/.github/workflows/vm-perf-comparison.yml index 53dada123574..e62f8fd0a284 100644 --- a/.github/workflows/vm-perf-comparison.yml +++ b/.github/workflows/vm-perf-comparison.yml @@ -35,6 +35,7 @@ jobs: touch .env echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - name: init run: | From 5c932f49fe5a5899df0e7b6a9fe55756d4559800 Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 16:46:34 +0300 Subject: [PATCH 31/52] ci: fixes with docker login (#2499) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Fixes with docker login ## Why ❔ Failed jobs in CI/CD ## Checklist - [x] PR 
title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. --- .github/workflows/build-prover-fri-gpu-gar.yml | 4 ++++ .github/workflows/build-prover-template.yml | 9 +++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-prover-fri-gpu-gar.yml b/.github/workflows/build-prover-fri-gpu-gar.yml index 9740cafd9678..11f151c8a136 100644 --- a/.github/workflows/build-prover-fri-gpu-gar.yml +++ b/.github/workflows/build-prover-fri-gpu-gar.yml @@ -29,6 +29,10 @@ jobs: run: | gsutil -m rsync -r gs://matterlabs-setup-data-us/${{ inputs.setup_keys_id }} docker/prover-gpu-fri-gar + - name: Auth Docker hub registry + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + - name: Login to us-central1 GAR run: | gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://us-docker.pkg.dev diff --git a/.github/workflows/build-prover-template.yml b/.github/workflows/build-prover-template.yml index e488157463dd..cce0fb1ecbe9 100644 --- a/.github/workflows/build-prover-template.yml +++ b/.github/workflows/build-prover-template.yml @@ -186,16 +186,13 @@ jobs: component: - witness-vector-generator steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Auth Docker hub registry run: | docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: Login to us-central1 GAR run: | gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | 
docker login -u oauth2accesstoken --password-stdin https://us-docker.pkg.dev From d04a6aea1596c9b23dd9b5ac1b36e42fd689106e Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 23:04:55 +0300 Subject: [PATCH 32/52] ci: fix docker auth with prover gar gpu (#2501) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Fix workflow with missed docker auth ## Why ❔ Broken prover build workflow ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. --- .github/workflows/build-docker-from-tag.yml | 4 +++- .github/workflows/build-prover-fri-gpu-gar.yml | 7 +++++++ .github/workflows/release-test-stage.yml | 3 +++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml index 7e5257796643..ebe5a4519a84 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -95,7 +95,6 @@ jobs: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - build-gar-prover-fri-gpu: name: Build GAR prover FRI GPU needs: [setup, build-push-prover-images] @@ -105,3 +104,6 @@ jobs: setup_keys_id: ${{ needs.setup.outputs.prover_fri_gpu_key_id }} image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} protocol_version: ${{ needs.build-push-prover-images.outputs.protocol_version }} + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/build-prover-fri-gpu-gar.yml b/.github/workflows/build-prover-fri-gpu-gar.yml index 11f151c8a136..b29af5ac42aa 100644 --- a/.github/workflows/build-prover-fri-gpu-gar.yml +++ b/.github/workflows/build-prover-fri-gpu-gar.yml @@ -2,6 +2,13 
@@ name: Build Prover FRI GPU with builtin setup data on: workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true inputs: image_tag_suffix: description: "Commit sha or git tag for Docker tag" diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index 9605568ead5d..7e924b549e21 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -116,3 +116,6 @@ jobs: setup_keys_id: ${{ needs.setup.outputs.prover_fri_gpu_key_id }} image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} protocol_version: ${{ needs.build-push-prover-images.outputs.protocol_version }} + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} From c319c8c87916eb3fa79fb902e0054053c10e72a1 Mon Sep 17 00:00:00 2001 From: zksync-era-bot <147085853+zksync-era-bot@users.noreply.github.com> Date: Thu, 25 Jul 2024 23:28:14 +0300 Subject: [PATCH 33/52] chore(main): release core 24.12.0 (#2478) :robot: I have created a release *beep* *boop* --- ## [24.12.0](https://github.com/matter-labs/zksync-era/compare/core-v24.11.0...core-v24.12.0) (2024-07-25) ### Features * add general config and secrets opts to snapshot creator ([#2471](https://github.com/matter-labs/zksync-era/issues/2471)) ([0f475c9](https://github.com/matter-labs/zksync-era/commit/0f475c949a28c4602539b4d75ee79e605f44e2de)) * Update to consensus 0.1.0-rc.4 (BFT-486) ([#2475](https://github.com/matter-labs/zksync-era/issues/2475)) ([ff6b10c](https://github.com/matter-labs/zksync-era/commit/ff6b10c4a994cf70297a034202bcb55152748cba)) ### Bug Fixes * consensus secrets generator ([#2484](https://github.com/matter-labs/zksync-era/issues/2484)) ([dea6969](https://github.com/matter-labs/zksync-era/commit/dea6969d1b67c54a0985278de68a8d50f1084dc1)) ### Performance Improvements * writing tx to bootloader 
memory is no longer quadratic ([#2479](https://github.com/matter-labs/zksync-era/issues/2479)) ([1c443e5](https://github.com/matter-labs/zksync-era/commit/1c443e5ecfd000279830262a4a35cbc83a9aacec)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --------- Co-authored-by: zksync-era-bot Co-authored-by: Roman Brodetski --- .github/release-please/manifest.json | 2 +- Cargo.lock | 2 +- core/CHANGELOG.md | 18 ++++++++++++++++++ core/bin/external_node/Cargo.toml | 2 +- 4 files changed, 21 insertions(+), 3 deletions(-) diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index a26bd9fb6b09..40df986877ac 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,4 +1,4 @@ { - "core": "24.11.0", + "core": "24.12.0", "prover": "16.1.0" } diff --git a/Cargo.lock b/Cargo.lock index 3b43a01fc5a0..a2cf9e4fde0c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8626,7 +8626,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "24.11.0" +version = "24.12.0" dependencies = [ "anyhow", "assert_matches", diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index d9a944c7efe3..d727db63d511 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [24.12.0](https://github.com/matter-labs/zksync-era/compare/core-v24.11.0...core-v24.12.0) (2024-07-25) + + +### Features + +* add general config and secrets opts to snapshot creator ([#2471](https://github.com/matter-labs/zksync-era/issues/2471)) ([0f475c9](https://github.com/matter-labs/zksync-era/commit/0f475c949a28c4602539b4d75ee79e605f44e2de)) +* Update to consensus 0.1.0-rc.4 (BFT-486) ([#2475](https://github.com/matter-labs/zksync-era/issues/2475)) ([ff6b10c](https://github.com/matter-labs/zksync-era/commit/ff6b10c4a994cf70297a034202bcb55152748cba)) + + +### Bug Fixes + +* consensus secrets 
generator ([#2484](https://github.com/matter-labs/zksync-era/issues/2484)) ([dea6969](https://github.com/matter-labs/zksync-era/commit/dea6969d1b67c54a0985278de68a8d50f1084dc1)) + + +### Performance Improvements + +* writing tx to bootloader memory is no longer quadratic ([#2479](https://github.com/matter-labs/zksync-era/issues/2479)) ([1c443e5](https://github.com/matter-labs/zksync-era/commit/1c443e5ecfd000279830262a4a35cbc83a9aacec)) + ## [24.11.0](https://github.com/matter-labs/zksync-era/compare/core-v24.10.0...core-v24.11.0) (2024-07-23) diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index c3e8a4bb18e2..8f2ffde6f798 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "24.11.0" # x-release-please-version +version = "24.12.0" # x-release-please-version edition.workspace = true authors.workspace = true homepage.workspace = true From b12d01b48d5cd4b350a9061b1148a7bf9ceecd33 Mon Sep 17 00:00:00 2001 From: D025 Date: Thu, 25 Jul 2024 23:44:44 +0300 Subject: [PATCH 34/52] ci: runners usage optimisation (#2500) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Change runer type for some jobs ## Why ❔ Runner usage optimisation ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. 
--- .github/workflows/ci.yml | 2 +- .github/workflows/release-please-cargo-lock.yml | 2 +- .github/workflows/release-test-stage.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f8ef751b317f..d9df796a2c50 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ concurrency: jobs: changed_files: - runs-on: [matterlabs-default-infra-runners] + runs-on: ubuntu-latest name: Get changed files outputs: core: ${{ steps.changed-files.outputs.core_any_changed }} diff --git a/.github/workflows/release-please-cargo-lock.yml b/.github/workflows/release-please-cargo-lock.yml index c7972580cacb..7ac8c0550d06 100644 --- a/.github/workflows/release-please-cargo-lock.yml +++ b/.github/workflows/release-please-cargo-lock.yml @@ -6,7 +6,7 @@ on: name: release-please-update-cargo-lock jobs: update_cargo_lock: - runs-on: [matterlabs-ci-runner] + runs-on: [matterlabs-default-infra-runners] steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index 7e924b549e21..5bbdd7ae7a0d 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -10,7 +10,7 @@ concurrency: jobs: changed_files: - runs-on: [matterlabs-default-infra-runners] + runs-on: ubuntu-latest name: Test changed-files outputs: core: ${{ steps.changed-files-yaml.outputs.core_any_changed }} From 572ad40175784b62d61aa46dcf901a7e2f6ff8f5 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 08:09:44 +0400 Subject: [PATCH 35/52] feat(witness_vector_generator): Make it possible to run multiple wvg instances in one binary (#2493) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ WVG is single-threaded, so running one "job" per binary is a waste (kind of). 
Also, it's not very convenient when developing locally. This PR adds a `--threads` CLI parameter which can be used to manipulate how much WVG jobs will be launched. ## Why ❔ Efficiency & convenience ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- .../bin/witness_vector_generator/src/main.rs | 45 ++++++++++++------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/prover/crates/bin/witness_vector_generator/src/main.rs b/prover/crates/bin/witness_vector_generator/src/main.rs index 58db6d6d5eb4..4451788ca9a4 100644 --- a/prover/crates/bin/witness_vector_generator/src/main.rs +++ b/prover/crates/bin/witness_vector_generator/src/main.rs @@ -32,6 +32,10 @@ struct Cli { pub(crate) config_path: Option, #[arg(long)] pub(crate) secrets_path: Option, + /// Number of WVG jobs to run in parallel. + /// Default value is 1. 
+ #[arg(long, default_value_t = 1)] + pub(crate) threads: usize, } #[tokio::main] @@ -106,17 +110,6 @@ async fn main() -> anyhow::Result<()> { let protocol_version = PROVER_PROTOCOL_SEMANTIC_VERSION; - let witness_vector_generator = WitnessVectorGenerator::new( - object_store, - pool, - circuit_ids_for_round_to_be_proven.clone(), - zone.clone(), - config, - protocol_version, - prover_config.max_attempts, - Some(prover_config.setup_data_path.clone()), - ); - let (stop_sender, stop_receiver) = watch::channel(false); let (stop_signal_sender, stop_signal_receiver) = oneshot::channel(); @@ -128,12 +121,32 @@ async fn main() -> anyhow::Result<()> { }) .expect("Error setting Ctrl+C handler"); - tracing::info!("Starting witness vector generation for group: {} with circuits: {:?} in zone: {} with protocol_version: {:?}", specialized_group_id, circuit_ids_for_round_to_be_proven, zone, protocol_version); + tracing::info!( + "Starting {} witness vector generation jobs for group: {} with circuits: {:?} in zone: {} with protocol_version: {:?}", + opt.threads, + specialized_group_id, + circuit_ids_for_round_to_be_proven, + zone, + protocol_version + ); - let tasks = vec![ - tokio::spawn(exporter_config.run(stop_receiver.clone())), - tokio::spawn(witness_vector_generator.run(stop_receiver, opt.n_iterations)), - ]; + let mut tasks = vec![tokio::spawn(exporter_config.run(stop_receiver.clone()))]; + + for _ in 0..opt.threads { + let witness_vector_generator = WitnessVectorGenerator::new( + object_store.clone(), + pool.clone(), + circuit_ids_for_round_to_be_proven.clone(), + zone.clone(), + config.clone(), + protocol_version, + prover_config.max_attempts, + Some(prover_config.setup_data_path.clone()), + ); + tasks.push(tokio::spawn( + witness_vector_generator.run(stop_receiver.clone(), opt.n_iterations), + )); + } let mut tasks = ManagedTasks::new(tasks); tokio::select! 
{ From b9cb222ed7c27daa73ce20b8911afd94106e70a6 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 08:09:46 +0400 Subject: [PATCH 36/52] fix(witness_generator): Only spawn 1 prometheus exporter per witness generator (#2492) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Currently, when running with `--all-rounds`, witness generator will spawn several prometheus exporter servers. This is meaningless, as the metrics are collected by the global registry anyways. This PR changes the code so that we only spawn a single prometheus exporter. ## Why ❔ Indeed ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- .../crates/bin/witness_generator/src/main.rs | 37 +++++++++---------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/prover/crates/bin/witness_generator/src/main.rs b/prover/crates/bin/witness_generator/src/main.rs index caad9458827e..d337778aba6c 100644 --- a/prover/crates/bin/witness_generator/src/main.rs +++ b/prover/crates/bin/witness_generator/src/main.rs @@ -180,9 +180,25 @@ async fn main() -> anyhow::Result<()> { } }; + let prometheus_config = if use_push_gateway { + let prometheus_config = prometheus_config + .clone() + .context("prometheus config needed when use_push_gateway enabled")?; + PrometheusExporterConfig::push( + prometheus_config + .gateway_endpoint() + .context("gateway_endpoint needed when use_push_gateway enabled")?, + prometheus_config.push_interval(), + ) + } else { + PrometheusExporterConfig::pull(prometheus_listener_port as u16) + }; + let prometheus_task = prometheus_config.run(stop_receiver.clone()); + let mut tasks = Vec::new(); + tasks.push(tokio::spawn(prometheus_task)); - for (i, round) in rounds.iter().enumerate() { + for round in 
rounds { tracing::info!( "initializing the {:?} witness generator, batch size: {:?} with protocol_version: {:?}", round, @@ -190,22 +206,6 @@ async fn main() -> anyhow::Result<()> { &protocol_version ); - let prometheus_config = if use_push_gateway { - let prometheus_config = prometheus_config - .clone() - .context("prometheus config needed when use_push_gateway enabled")?; - PrometheusExporterConfig::push( - prometheus_config - .gateway_endpoint() - .context("gateway_endpoint needed when use_push_gateway enabled")?, - prometheus_config.push_interval(), - ) - } else { - // `u16` cast is safe since i is in range [0, 4) - PrometheusExporterConfig::pull(prometheus_listener_port + i as u16) - }; - let prometheus_task = prometheus_config.run(stop_receiver.clone()); - let witness_generator_task = match round { AggregationRound::BasicCircuits => { let setup_data_path = prover_config.setup_data_path.clone(); @@ -276,7 +276,6 @@ async fn main() -> anyhow::Result<()> { } }; - tasks.push(tokio::spawn(prometheus_task)); tasks.push(tokio::spawn(witness_generator_task)); tracing::info!( @@ -284,7 +283,7 @@ async fn main() -> anyhow::Result<()> { round, started_at.elapsed() ); - SERVER_METRICS.init_latency[&(*round).into()].set(started_at.elapsed()); + SERVER_METRICS.init_latency[&round.into()].set(started_at.elapsed()); } let (mut stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); From 7c336b1e180b9d5ba1ba74169c61ce27a251e2fc Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 08:15:43 +0400 Subject: [PATCH 37/52] fix(proof_data_handler): Unlock jobs on transient errors (#2486) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Currently, proof data handler locks the job for proof generation, and starts fetching required data after that. If any error happens during fetching, the method will err, and the job will remain locked. 
This PR changes it, so that if any error occurs, we unlock the job before we return an error. Additionally, it reduces the amount of non-necessary panics in the touched code, and adds some docs. ## Why ❔ Correctness & efficiency. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- ...f8fbd8fdaf23266412e2faffb7e3813213b98.json | 14 +++ core/lib/dal/src/proof_generation_dal.rs | 57 ++++++++-- .../src/request_processor.rs | 104 +++++++++++------- 3 files changed, 131 insertions(+), 44 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-a23d63b7c4264ee0f5b60c09f09f8fbd8fdaf23266412e2faffb7e3813213b98.json diff --git a/core/lib/dal/.sqlx/query-a23d63b7c4264ee0f5b60c09f09f8fbd8fdaf23266412e2faffb7e3813213b98.json b/core/lib/dal/.sqlx/query-a23d63b7c4264ee0f5b60c09f09f8fbd8fdaf23266412e2faffb7e3813213b98.json new file mode 100644 index 000000000000..257ce7050619 --- /dev/null +++ b/core/lib/dal/.sqlx/query-a23d63b7c4264ee0f5b60c09f09f8fbd8fdaf23266412e2faffb7e3813213b98.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE proof_generation_details\n SET\n status = 'unpicked',\n updated_at = NOW()\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "a23d63b7c4264ee0f5b60c09f09f8fbd8fdaf23266412e2faffb7e3813213b98" +} diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index cf1437ff411c..4e37cc644f8e 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -30,7 +30,14 @@ enum ProofGenerationJobStatus { } impl ProofGenerationDal<'_, '_> { - pub async fn get_next_block_to_be_proven( + /// Chooses the batch number so that it has all the necessary 
data to generate the proof + /// and is not already picked. + /// + /// Marks the batch as picked by the prover, preventing it from being picked twice. + /// + /// The batch can be unpicked either via a corresponding DAL method, or it is considered + /// not picked after `processing_timeout` passes. + pub async fn lock_batch_for_proving( &mut self, processing_timeout: Duration, ) -> DalResult> { @@ -72,14 +79,38 @@ impl ProofGenerationDal<'_, '_> { "#, &processing_timeout, ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() + .instrument("lock_batch_for_proving") + .with_arg("processing_timeout", &processing_timeout) + .fetch_optional(self.storage) + .await? .map(|row| L1BatchNumber(row.l1_batch_number as u32)); Ok(result) } + /// Marks a previously locked batch as 'unpicked', allowing it to be picked without having + /// to wait for the processing timeout. + pub async fn unlock_batch(&mut self, l1_batch_number: L1BatchNumber) -> DalResult<()> { + let batch_number = i64::from(l1_batch_number.0); + sqlx::query!( + r#" + UPDATE proof_generation_details + SET + status = 'unpicked', + updated_at = NOW() + WHERE + l1_batch_number = $1 + "#, + batch_number, + ) + .instrument("unlock_batch") + .with_arg("l1_batch_number", &l1_batch_number) + .execute(self.storage) + .await?; + + Ok(()) + } + pub async fn save_proof_artifacts_metadata( &mut self, batch_number: L1BatchNumber, @@ -388,7 +419,7 @@ mod tests { let picked_l1_batch = conn .proof_generation_dal() - .get_next_block_to_be_proven(Duration::MAX) + .lock_batch_for_proving(Duration::MAX) .await .unwrap(); assert_eq!(picked_l1_batch, Some(L1BatchNumber(1))); @@ -399,10 +430,22 @@ mod tests { .unwrap(); assert_eq!(unpicked_l1_batch, None); + // Check that we can unlock the batch and then pick it again. 
+ conn.proof_generation_dal() + .unlock_batch(L1BatchNumber(1)) + .await + .unwrap(); + let picked_l1_batch = conn + .proof_generation_dal() + .lock_batch_for_proving(Duration::MAX) + .await + .unwrap(); + assert_eq!(picked_l1_batch, Some(L1BatchNumber(1))); + // Check that with small enough processing timeout, the L1 batch can be picked again let picked_l1_batch = conn .proof_generation_dal() - .get_next_block_to_be_proven(Duration::ZERO) + .lock_batch_for_proving(Duration::ZERO) .await .unwrap(); assert_eq!(picked_l1_batch, Some(L1BatchNumber(1))); @@ -414,7 +457,7 @@ mod tests { let picked_l1_batch = conn .proof_generation_dal() - .get_next_block_to_be_proven(Duration::MAX) + .lock_batch_for_proving(Duration::MAX) .await .unwrap(); assert_eq!(picked_l1_batch, None); diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs index a89f9b63a848..11d0aebfa806 100644 --- a/core/node/proof_data_handler/src/request_processor.rs +++ b/core/node/proof_data_handler/src/request_processor.rs @@ -51,21 +51,64 @@ impl RequestProcessor { ) -> Result, RequestProcessorError> { tracing::info!("Received request for proof generation data: {:?}", request); - let l1_batch_number_result = self - .pool + let l1_batch_number = match self.lock_batch_for_proving().await? { + Some(number) => number, + None => return Ok(Json(ProofGenerationDataResponse::Success(None))), // no batches pending to be proven + }; + + let proof_generation_data = self + .proof_generation_data_for_existing_batch(l1_batch_number) + .await; + + // If we weren't able to fetch all the data, we should unlock the batch before returning. + match proof_generation_data { + Ok(data) => Ok(Json(ProofGenerationDataResponse::Success(Some(Box::new( + data, + ))))), + Err(err) => { + self.unlock_batch(l1_batch_number).await?; + Err(err) + } + } + } + + /// Will choose a batch that has all the required data and isn't picked up by any prover yet. 
+ async fn lock_batch_for_proving(&self) -> Result, RequestProcessorError> { + self.pool .connection() .await - .unwrap() + .map_err(RequestProcessorError::Dal)? .proof_generation_dal() - .get_next_block_to_be_proven(self.config.proof_generation_timeout()) + .lock_batch_for_proving(self.config.proof_generation_timeout()) .await - .map_err(RequestProcessorError::Dal)?; + .map_err(RequestProcessorError::Dal) + } - let l1_batch_number = match l1_batch_number_result { - Some(number) => number, - None => return Ok(Json(ProofGenerationDataResponse::Success(None))), // no batches pending to be proven - }; + /// Marks the batch as 'unpicked', allowing it to be picked up by another prover. + async fn unlock_batch( + &self, + l1_batch_number: L1BatchNumber, + ) -> Result<(), RequestProcessorError> { + self.pool + .connection() + .await + .map_err(RequestProcessorError::Dal)? + .proof_generation_dal() + .unlock_batch(l1_batch_number) + .await + .map_err(RequestProcessorError::Dal) + } + /// Will fetch all the required data for the batch and return it. + /// + /// ## Panics + /// + /// Expects all the data to be present in the database. + /// Will panic if any of the required data is missing. + async fn proof_generation_data_for_existing_batch( + &self, + l1_batch_number: L1BatchNumber, + ) -> Result { let vm_run_data: VMRunWitnessInputData = self .blob_store .get(l1_batch_number) @@ -77,52 +120,43 @@ impl RequestProcessor { .await .map_err(RequestProcessorError::ObjectStore)?; - let previous_batch_metadata = self + // Acquire connection after interacting with GCP, to avoid holding the connection for too long. + let mut conn = self .pool .connection() .await - .unwrap() + .map_err(RequestProcessorError::Dal)?; + + let previous_batch_metadata = conn .blocks_dal() .get_l1_batch_metadata(L1BatchNumber(l1_batch_number.checked_sub(1).unwrap())) .await - .unwrap() + .map_err(RequestProcessorError::Dal)? 
.expect("No metadata for previous batch"); - let header = self - .pool - .connection() - .await - .unwrap() + let header = conn .blocks_dal() .get_l1_batch_header(l1_batch_number) .await - .unwrap() + .map_err(RequestProcessorError::Dal)? .unwrap_or_else(|| panic!("Missing header for {}", l1_batch_number)); let minor_version = header.protocol_version.unwrap(); - let protocol_version = self - .pool - .connection() - .await - .unwrap() + let protocol_version = conn .protocol_versions_dal() .get_protocol_version_with_latest_patch(minor_version) .await - .unwrap() + .map_err(RequestProcessorError::Dal)? .unwrap_or_else(|| { panic!("Missing l1 verifier info for protocol version {minor_version}") }); - let batch_header = self - .pool - .connection() - .await - .unwrap() + let batch_header = conn .blocks_dal() .get_l1_batch_header(l1_batch_number) .await - .unwrap() - .unwrap(); + .map_err(RequestProcessorError::Dal)? + .unwrap_or_else(|| panic!("Missing header for {}", l1_batch_number)); let eip_4844_blobs = match self.commitment_mode { L1BatchCommitmentMode::Validium => Eip4844Blobs::empty(), @@ -149,16 +183,12 @@ impl RequestProcessor { METRICS.observe_blob_sizes(&blob); - let proof_gen_data = ProofGenerationData { + Ok(ProofGenerationData { l1_batch_number, witness_input_data: blob, protocol_version: protocol_version.version, l1_verifier_config: protocol_version.l1_verifier_config, - }; - - Ok(Json(ProofGenerationDataResponse::Success(Some(Box::new( - proof_gen_data, - ))))) + }) } pub(crate) async fn submit_proof( From 1b61d0797062ab8b0aa2c1e92b23a3a0d8fd2c61 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 08:24:58 +0400 Subject: [PATCH 38/52] feat: New prover documentation (#2466) New prover documentation. [Rendered form](https://github.com/matter-labs/zksync-era/tree/popzxc-new-prover-docs/prover/docs/README.md) is recommended, as it's easier to view. 
Additionally, does the following: - Introduces ordering for advanced guides (it was a mix, not it's more or less sorted and suggests an order for reading). - Fixes a few things in the `setup-dev.md`. - Creates a `prover-local` config profile, so that no manual interactions with configs are needed. - Gets rid of `prover/setup.sh` as it's no longer needed that much. --- .gitignore | 1 + .../{contracts.md => 04_contracts.md} | 0 ...how_call_works.md => 05_how_call_works.md} | 0 ...n_works.md => 06_how_transaction_works.md} | 0 .../{fee_model.md => 07_fee_model.md} | 0 ..._works.md => 08_how_l2_messaging_works.md} | 0 .../advanced/{pubdata.md => 09_pubdata.md} | 0 ...with-blobs.md => 10_pubdata_with_blobs.md} | 0 .../{compression.md => 11_compression.md} | 0 ...vm_intro.md => 12_alternative_vm_intro.md} | 2 - .../{zk_intuition.md => 13_zk_intuition.md} | 0 ...r_overview.md => 14_zk_deeper_overview.md} | 4 +- .../{prover_keys.md => 15_prover_keys.md} | 0 ..._debugging.md => 90_advanced_debugging.md} | 0 .../{docker_and_ci.md => 91_docker_and_ci.md} | 0 docs/guides/advanced/README.md | 28 ++ docs/guides/setup-dev.md | 109 ++---- etc/env/base/fri_prover.toml | 2 +- etc/env/configs/prover-local.toml | 8 + prover/README.md | 41 +- prover/crates/bin/prover_fri/README.md | 351 +----------------- prover/docs/00_intro.md | 80 ++++ prover/docs/01_gcp_vm.md | 147 ++++++++ prover/docs/02_setup.md | 58 +++ prover/docs/03_launch.md | 101 +++++ prover/docs/04_flow.md | 238 ++++++++++++ prover/docs/99_further_reading.md | 13 + prover/docs/README.md | 16 + prover/setup.sh | 26 -- 29 files changed, 742 insertions(+), 483 deletions(-) rename docs/guides/advanced/{contracts.md => 04_contracts.md} (100%) rename docs/guides/advanced/{how_call_works.md => 05_how_call_works.md} (100%) rename docs/guides/advanced/{how_transaction_works.md => 06_how_transaction_works.md} (100%) rename docs/guides/advanced/{fee_model.md => 07_fee_model.md} (100%) rename 
docs/guides/advanced/{how_l2_messaging_works.md => 08_how_l2_messaging_works.md} (100%) rename docs/guides/advanced/{pubdata.md => 09_pubdata.md} (100%) rename docs/guides/advanced/{pubdata-with-blobs.md => 10_pubdata_with_blobs.md} (100%) rename docs/guides/advanced/{compression.md => 11_compression.md} (100%) rename docs/guides/advanced/{0_alternative_vm_intro.md => 12_alternative_vm_intro.md} (99%) rename docs/guides/advanced/{zk_intuition.md => 13_zk_intuition.md} (100%) rename docs/guides/advanced/{deeper_overview.md => 14_zk_deeper_overview.md} (99%) rename docs/guides/advanced/{prover_keys.md => 15_prover_keys.md} (100%) rename docs/guides/advanced/{advanced_debugging.md => 90_advanced_debugging.md} (100%) rename docs/guides/advanced/{docker_and_ci.md => 91_docker_and_ci.md} (100%) create mode 100644 docs/guides/advanced/README.md create mode 100644 etc/env/configs/prover-local.toml create mode 100644 prover/docs/00_intro.md create mode 100644 prover/docs/01_gcp_vm.md create mode 100644 prover/docs/02_setup.md create mode 100644 prover/docs/03_launch.md create mode 100644 prover/docs/04_flow.md create mode 100644 prover/docs/99_further_reading.md create mode 100644 prover/docs/README.md delete mode 100755 prover/setup.sh diff --git a/.gitignore b/.gitignore index 3ffddc7a7930..7b626c310d4b 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ Cargo.lock !/etc/env/configs/stage-proofs.toml !/etc/env/configs/testnet.toml !/etc/env/configs/mainnet.toml +!/etc/env/configs/prover-local.toml /etc/env/l1-inits !/etc/env/l1-inits/stage.env !/etc/env/l1-inits/stage_proofs.env diff --git a/docs/guides/advanced/contracts.md b/docs/guides/advanced/04_contracts.md similarity index 100% rename from docs/guides/advanced/contracts.md rename to docs/guides/advanced/04_contracts.md diff --git a/docs/guides/advanced/how_call_works.md b/docs/guides/advanced/05_how_call_works.md similarity index 100% rename from docs/guides/advanced/how_call_works.md rename to 
docs/guides/advanced/05_how_call_works.md diff --git a/docs/guides/advanced/how_transaction_works.md b/docs/guides/advanced/06_how_transaction_works.md similarity index 100% rename from docs/guides/advanced/how_transaction_works.md rename to docs/guides/advanced/06_how_transaction_works.md diff --git a/docs/guides/advanced/fee_model.md b/docs/guides/advanced/07_fee_model.md similarity index 100% rename from docs/guides/advanced/fee_model.md rename to docs/guides/advanced/07_fee_model.md diff --git a/docs/guides/advanced/how_l2_messaging_works.md b/docs/guides/advanced/08_how_l2_messaging_works.md similarity index 100% rename from docs/guides/advanced/how_l2_messaging_works.md rename to docs/guides/advanced/08_how_l2_messaging_works.md diff --git a/docs/guides/advanced/pubdata.md b/docs/guides/advanced/09_pubdata.md similarity index 100% rename from docs/guides/advanced/pubdata.md rename to docs/guides/advanced/09_pubdata.md diff --git a/docs/guides/advanced/pubdata-with-blobs.md b/docs/guides/advanced/10_pubdata_with_blobs.md similarity index 100% rename from docs/guides/advanced/pubdata-with-blobs.md rename to docs/guides/advanced/10_pubdata_with_blobs.md diff --git a/docs/guides/advanced/compression.md b/docs/guides/advanced/11_compression.md similarity index 100% rename from docs/guides/advanced/compression.md rename to docs/guides/advanced/11_compression.md diff --git a/docs/guides/advanced/0_alternative_vm_intro.md b/docs/guides/advanced/12_alternative_vm_intro.md similarity index 99% rename from docs/guides/advanced/0_alternative_vm_intro.md rename to docs/guides/advanced/12_alternative_vm_intro.md index fab623e38ae3..a36f0b560d33 100644 --- a/docs/guides/advanced/0_alternative_vm_intro.md +++ b/docs/guides/advanced/12_alternative_vm_intro.md @@ -2,8 +2,6 @@ ## zkEVM clarifier -[Back to ToC](../../specs/README.md) - The ZKsync zkEVM plays a fundamentally different role in the zkStack than the EVM does in Ethereum. 
The EVM is used to execute code in Ethereum's state transition function. This STF needs a client to implement and run it. Ethereum has a multi-client philosophy, there are multiple clients, and they are written in Go, Rust, and other traditional programming diff --git a/docs/guides/advanced/zk_intuition.md b/docs/guides/advanced/13_zk_intuition.md similarity index 100% rename from docs/guides/advanced/zk_intuition.md rename to docs/guides/advanced/13_zk_intuition.md diff --git a/docs/guides/advanced/deeper_overview.md b/docs/guides/advanced/14_zk_deeper_overview.md similarity index 99% rename from docs/guides/advanced/deeper_overview.md rename to docs/guides/advanced/14_zk_deeper_overview.md index 7fa4a009a920..8ec2c4d35c03 100644 --- a/docs/guides/advanced/deeper_overview.md +++ b/docs/guides/advanced/14_zk_deeper_overview.md @@ -1,6 +1,4 @@ -# Deeper Overview - -[Back to ToC](../../../README.md) +# Proof System Deeper Overview The purpose of this section is to explain our new proof system from an engineering standpoint. We will examine the code examples and how the libraries communicate. 
diff --git a/docs/guides/advanced/prover_keys.md b/docs/guides/advanced/15_prover_keys.md similarity index 100% rename from docs/guides/advanced/prover_keys.md rename to docs/guides/advanced/15_prover_keys.md diff --git a/docs/guides/advanced/advanced_debugging.md b/docs/guides/advanced/90_advanced_debugging.md similarity index 100% rename from docs/guides/advanced/advanced_debugging.md rename to docs/guides/advanced/90_advanced_debugging.md diff --git a/docs/guides/advanced/docker_and_ci.md b/docs/guides/advanced/91_docker_and_ci.md similarity index 100% rename from docs/guides/advanced/docker_and_ci.md rename to docs/guides/advanced/91_docker_and_ci.md diff --git a/docs/guides/advanced/README.md b/docs/guides/advanced/README.md new file mode 100644 index 000000000000..5a3673b558ad --- /dev/null +++ b/docs/guides/advanced/README.md @@ -0,0 +1,28 @@ +# ZKsync advanced guides + +This section contains more advanced guides that aim to explain complex internals of ZKsync ecosystem in an easy to grasp +way. + +## Table of Contents + +- [Local initialization](./01_initialization.md) +- [Deposits](./02_deposits.md) +- [Withdrawals](./03_withdrawals.md) +- [Contracts](./04_contracts.md) +- [Calls](./05_how_call_works.md) +- [Transactions](./06_how_transaction_works.md) +- [Fee model](./07_fee_model.md) +- [L2 messaging](./08_how_l2_messaging_works.md) +- [Pubdata](./09_pubdata.md) +- [Pubdata with blobs](./10_pubdata_with_blobs.md) +- [Bytecode compression](./11_compression.md) +- [EraVM intro](./12_alternative_vm_intro.md) +- [ZK intuition](./13_zk_intuition.md) +- [ZK deeper overview](./14_zk_deeper_overview.md) +- [Prover keys](./15_prover_keys.md) + +Additionally, there are a few articles that cover specific topics that may be useful for developers actively working on +`zksync-era` repo. 
+ +- [Advanced debugging](./90_advanced_debugging.md) +- [Docker and CI](./91_docker_and_ci.md) diff --git a/docs/guides/setup-dev.md b/docs/guides/setup-dev.md index aafd96cda40a..12e8da7b022f 100644 --- a/docs/guides/setup-dev.md +++ b/docs/guides/setup-dev.md @@ -2,42 +2,67 @@ ## TL;DR -If you run on 'clean' Debian on GCP: +This is a shorter version of setup guide to make it easier subsequent initializations. If it's the first time you're +initializing the workspace, it's recommended that you read the whole guide below, as it provides more context and tips. + +If you run on 'clean' Ubuntu on GCP: ```bash +# For VMs only! They don't have SSH keys, so we override SSH with HTTPS +git config --global url."https://github.com/".insteadOf git@github.com: +git config --global url."https://".insteadOf git:// + # Rust curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # NVM curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash # All necessary stuff -sudo apt-get install build-essential pkg-config cmake clang lldb lld libssl-dev postgresql -# Docker -sudo usermod -aG docker YOUR_USER +sudo apt-get update +sudo apt-get install build-essential pkg-config cmake clang lldb lld libssl-dev postgresql apt-transport-https ca-certificates curl software-properties-common +# Install docker +curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - +sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable" +sudo apt install docker-ce +sudo usermod -aG docker ${USER} + +# Stop default postgres (as we'll use the docker one) +sudo systemctl stop postgresql +sudo systemctl disable postgresql +# Start docker. +sudo systemctl start docker ## You might need to re-connect (due to usermod change). # Node & yarn nvm install 20 +# Important: there will be a note in the output to load +# new paths in your local session, either run it or reload the terminal. 
npm install -g yarn yarn set version 1.22.19 +# For running unit tests +cargo install cargo-nextest # SQL tools cargo install sqlx-cli --version 0.7.4 -# Stop default postgres (as we'll use the docker one) -sudo systemctl stop postgresql -# Start docker. -sudo systemctl start docker # Foundry curl -L https://foundry.paradigm.xyz | bash foundryup --branch master +# You will need to reload your `*rc` file here + +# Clone the repo to the desired location +git clone git@github.com:matter-labs/zksync-era.git +cd zksync-era +git submodule update --init --recursive ``` +Don't forget to [add env variables](#Environment) and look at [tips](#tips). + ## Supported operating systems ZKsync currently can be launched on any \*nix operating system (e.g. any linux distribution or MacOS). -If you're using Windows, then make sure to use WSL 2, since WSL 1 is known to cause troubles. +If you're using Windows, then make sure to use WSL 2. Additionally, if you are going to use WSL 2, make sure that your project is located in the _linux filesystem_, since accessing NTFS partitions from within WSL is very slow. @@ -90,38 +115,9 @@ If logging out does not resolve the issue, restarting the computer should. ## `Node` & `Yarn` -1. Install `Node` (requires version `v18.18.0`). Since our team attempts to always use the latest LTS version of - `Node.js`, we suggest you to install [nvm](https://github.com/nvm-sh/nvm). It will allow you to update `Node.js` - version easily in the future (by running `nvm use v18.18.0` in the root of the repository) -2. Install `yarn` (make sure to get version 1.22.19 - you can change the version by running `yarn set version 1.22.19`). - Instructions can be found on the [official site](https://classic.yarnpkg.com/en/docs/install/). Check if `yarn` is - installed by running `yarn -v`. 
If you face any problems when installing `yarn`, it might be the case that your - package manager installed the wrong package.Make sure to thoroughly follow the instructions above on the official - website. It contains a lot of troubleshooting guides in it. - -## `Axel` - -Install `axel` for downloading keys: - -On mac: - -```bash -brew install axel -``` - -On debian-based linux: - -```bash -sudo apt-get install axel -``` - -Check the version of `axel` with the following command: - -``` -axel --version -``` - -Make sure the version is higher than `2.17.10`. +1. Install `Node` (requires version `v20`). The recommended way is via [nvm](https://github.com/nvm-sh/nvm). +2. Install `yarn`. Can be done via `npm install -g yarn`. Make sure to get version 1.22.19 - you can change the version + by running `yarn set version 1.22.19`. ## `clang` @@ -221,33 +217,9 @@ SQLx is a Rust library we use to interact with Postgres, and its CLI is used to features of the library. ```bash -cargo install --locked sqlx-cli --version 0.7.3 +cargo install --locked sqlx-cli --version 0.7.4 ``` -## Solidity compiler `solc` - -Install the latest solidity compiler. - -On mac: - -```bash -brew install solidity -``` - -On debian-based linux: - -```bash -sudo add-apt-repository ppa:ethereum/ethereum -sudo apt-get update -sudo apt-get install solc -``` - -Alternatively, download a [precompiled version](https://github.com/ethereum/solc-bin) and add it to your PATH. - -## Python - -Most environments will have this preinstalled but if not, install Python. - ## Easier method using `nix` Nix is a tool that can fetch _exactly_ the right dependencies specified via hashes. The current config is Linux-only but @@ -274,11 +246,10 @@ Edit the lines below and add them to your shell profile file (e.g. 
`~/.bash_prof export ZKSYNC_HOME=/path/to/zksync export PATH=$ZKSYNC_HOME/bin:$PATH - -# If you're like me, uncomment: -# cd $ZKSYNC_HOME ``` +## Tips + ### Tip: `mold` Optionally, you may want to optimize the build time with the modern linker, [`mold`](https://github.com/rui314/mold). diff --git a/etc/env/base/fri_prover.toml b/etc/env/base/fri_prover.toml index 1578a7f66e38..1c93752251bc 100644 --- a/etc/env/base/fri_prover.toml +++ b/etc/env/base/fri_prover.toml @@ -1,5 +1,5 @@ [fri_prover] -setup_data_path = "vk_setup_data_generator_server_fri/data" +setup_data_path = "crates/bin/vk_setup_data_generator_server_fri/data" prometheus_port = 3315 max_attempts = 10 generation_timeout_in_secs = 600 diff --git a/etc/env/configs/prover-local.toml b/etc/env/configs/prover-local.toml new file mode 100644 index 000000000000..1850871bc2c8 --- /dev/null +++ b/etc/env/configs/prover-local.toml @@ -0,0 +1,8 @@ +# Config for running prover locally +__imports__ = [ "base", "l1-inits/.init.env", "l2-inits/dev.init.env" ] + +[eth_sender.sender] +proof_sending_mode = "OnlyRealProofs" + +[fri_prover] +cloud_type = "Local" diff --git a/prover/README.md b/prover/README.md index 5e537bf8bc0b..98ee4edb78e9 100644 --- a/prover/README.md +++ b/prover/README.md @@ -2,36 +2,11 @@ This directory contains all the libraries and binaries related to proving of the blocks. -Directories with 'fri' suffix, are mostly used with the new proof system (Boojum). - -## Components - -### vk_setup_data_generator_server_fri - -Set of tools to create setup keys, verification keys and verification key hashes for the circuits. - -Usually run once, and then we use their outputs in multiple places in the system. - -### prover_fri_gateway - -Communication module between the 'main' server running the state keeper, and the proving subsystem. - -### witness_generator - -Creating prover jobs and saving necessary artifacts. - -### prover_fri - -This directory contains the main 'prover'. 
It can be run in two modes: either as CPU or as GPU. (controlled via 'gpu' -feature flag). - -### witness_vector_generator - -Only used in GPU proving mode. Prepares all the witness data using CPU, and then streams it to the prover_fri. - -This is mostly used for resource efficiency (as machines with GPUs are more expensive, it allows us to run many -witness_vector_generators, that can 'share' as single gpu based prover_fri). - -### proof_fri_compressor - -Used as a 'last step' to compress/wrap the final FRI proof into a SNARK (to make L1 verification cheaper). +## Documentation + +- [Intro](00_intro.md) +- [Setting up a GCP VM](01_gcp_vm.md) +- [Workspace setup](02_setup.md) +- [Running prover subsystem](03_launch.md) +- [Proof generation flow](04_flow.md) +- [Further reading](99_further_reading.md) diff --git a/prover/crates/bin/prover_fri/README.md b/prover/crates/bin/prover_fri/README.md index 141b058172f7..6a802cbcd8e1 100644 --- a/prover/crates/bin/prover_fri/README.md +++ b/prover/crates/bin/prover_fri/README.md @@ -1,352 +1,5 @@ # FRI Prover -## Overview of the pipeline +Implementation of the circuit prover. -These are the main components to this process: - -- Sequencer -- Prover gateway -- Witness -- Prover -- Compressor - -All of them will be sharing information through a SQL database and GCS bucket. The general idea is that the sequencer -will produce blocks and the gateway will place them into the database to be proven. Then, the rest of the components -will pull jobs from the database and do their part of the pipeline, loading intermediary artifacts from GCS. 
- -```mermaid -flowchart LR - A["Operator"] -->|Produces block| F[Prover Gateway] - F -->|Inserts into DB| B["Postgres DB"] - B -->|Retrieves proven block \nafter compression| F - B --> C["Witness"] - C --- C1["Basic Circuits"] - C --- C2["Leaf Aggregation"] - C --- C3["Node Aggregation"] - C --- C4["Recursion Tip"] - C --- C5["Scheduler"] - C --> B - B --> D["Vector Generator/Prover"] - D -->|Proven Block| B - B --> G["Compressor"] - G -->|Compressed block| B -``` - -## Prerequisites - -Make sure these dependencies are installed and available on your machine: -[Installing dependencies](../../docs/guides/setup-dev.md). Make sure you go through all steps, including setting -environment variables for `zk`. Same work is done at the bottom of this doc, if you want a TL;DR; for running GPU -provers on GCP. - -## Proving a block using GPU prover locally - -Below steps can be used to prove a block on local machine using GPU prover. Running a GPU prover requires a CUDA 12.0 -installation as a pre-requisite, alongside these machine specs: - -- CPU: At least 16 physical cores -- RAM: 85GB of RAM -- Disk: 200GB of free disk (lower might be fine, depending on how many proofs you want to generate) -- GPU: NVIDIA GPU with CUDA support and at least 6GB of VRAM, we recommend to use GPUs with at least 16GB VRAM for - optimal performance. In our GPU picks for datacenters while running on Google Cloud Platform, the L4 takes the top - spot in terms of price-to-performance ratio, with the T4 coming in second. - -1. Initialize DB and run migrations (make sure you're in the root of the repo): `zk && zk init` -2. Run the server. In the root of the repository: - - ```console - zk server --components=api,eth,tree,state_keeper,housekeeper,commitment_generator,proof_data_handler,vm_runner_bwip - ``` - - Note that it will produce a first l1 batch that can be proven (should be batch 0). - -3. Generate the GPU setup data (no need to regenerate if it's already there). 
If you want to use this with the GPU - compressors, you need to change the key in the file from `setup_2^26.key` to `setup_2^24.key`. This will consume - around 20GB of disk. You need to be in the `prover/` directory (for all commands from here onwards, you need to be in - the `prover/` directory) and run: - - ```console - ./setup.sh gpu - ``` - -4. Run prover gateway to fetch blocks to be proven from server: - - ```console - zk f cargo run --release --bin zksync_prover_fri_gateway - ``` - -5. Run 4 witness generators to generate witness for each round: - - ```console - API_PROMETHEUS_LISTENER_PORT=3116 zk f cargo run --release --bin zksync_witness_generator -- --round=basic_circuits - API_PROMETHEUS_LISTENER_PORT=3117 zk f cargo run --release --bin zksync_witness_generator -- --round=leaf_aggregation - API_PROMETHEUS_LISTENER_PORT=3118 zk f cargo run --release --bin zksync_witness_generator -- --round=node_aggregation - API_PROMETHEUS_LISTENER_PORT=3119 zk f cargo run --release --bin zksync_witness_generator -- --round=recursion_tip - API_PROMETHEUS_LISTENER_PORT=3120 zk f cargo run --release --bin zksync_witness_generator -- --round=scheduler - ``` - - or alternatively (recommended), start all of them with - - ```console - API_PROMETHEUS_LISTENER_PORT=3116 zk f cargo run --release --bin zksync_witness_generator -- --all_rounds - ``` - - Note that this will automatically open four ports: 3116 (the starting port), 3117, 3118 and 3119 for subsequent - provers. - -6. Run witness vector generators to feed jobs to GPU prover: - - ```console - FRI_WITNESS_VECTOR_GENERATOR_PROMETHEUS_LISTENER_PORT=3420 zk f cargo run --release --bin zksync_witness_vector_generator - ``` - - Note that you may run multiple of them (as 1 prover roughly can be fed by 10 vector generators). Make sure to use a - different port! - -7. Run prover to perform actual proving: `zk f cargo run --features "gpu" --release --bin zksync_prover_fri` - -8. 
Finally, run proof compressor to compress the proof to be sent on L1: - `zk f cargo run --release --bin zksync_proof_fri_compressor` - -## Block proving with CPU - -We don't recommend using this method, as at the moment none are ran in production and may be broken. There will be -investment in the future, but for the time being, please use GPU provers. That said, instructions are left below for -brave adventurers. - -Below steps can be used to prove a block on local machine using CPU prover. This is useful for debugging and testing -Machine specs: - -- CPU: At least 8 physical cores -- RAM: 80GB of RAM (enable swap if your machine has less RAM) -- Disk: 400GB of free disk - -1. Install Rust (correct version from rust-toolchain file should be used automatically if you don't have any local - overrides) -2. Initialize DB and run migrations. Go into the root of the repository, then run - - ```console - zk init - ``` - -3. Generate the CPU setup data (no need to regenerate if it's already there). This will consume around 300GB of disk. - For this, move to the `prover` directory, and run - - ```console - ./setup.sh - ``` - - For the following steps, we recommend using `tmux` to run every command on a separate session, so you can attach to - and monitor logs for each one. - -4. Run the sequencer/operator. In the root of the repository: - - ```console - zk server --components=api,eth,tree,state_keeper,housekeeper,commitment_generator,proof_data_handler - ``` - - to produce blocks to be proven - -5. Move again into the `prover` directory. The rest of the steps will be performed from there. Run prover gateway to - fetch blocks to be proven from server: - - ```console - zk f cargo run --release --bin zksync_prover_fri_gateway - ``` - -6. Run 4 witness generators to generate witness for each round: - -7. Run prover to perform actual proving: - - ```console - zk f cargo run --release --bin zksync_prover_fri - ``` - -8. 
Finally, run proof compressor to compress the proof to be sent on L1: - - ```console - zk f cargo run --release --bin zksync_proof_fri_compressor - ``` - -## Running GPU compressors - -There is an option to run compressors with the GPU, which will significantly improve the performance. - -1. The hardware setup should be the same as for GPU prover -2. Install and compile `era-bellman-cuda` library - - ```console - git clone https://github.com/matter-labs/era-bellman-cuda - cmake -Bera-bellman-cuda/build -Sera-bellman-cuda/ -DCMAKE_BUILD_TYPE=Release - cmake --build bellman-cuda/build/ - ``` - -3. Set path of library as environmental variable - - ```console - export BELLMAN_CUDA_DIR=$PWD/bellman-cuda - ``` - -4. GPU compressor uses `setup_2^24.key`. Download it by using: - - ```console - wget https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2^24.key - ``` - -5. Set the env variable with it's path: - - ```console - export CRS_FILE=$PWD/setup_2^24.key - ``` - -6. Run the compressor using: - - ```console - zk f cargo run --features "gpu" --release --bin zksync_proof_fri_compressor - ``` - -## Checking the status of the prover - -Once everything is running (either with the CPU or GPU prover), the server should have at least three blocks, and you -can see the first one by running - -```console -curl -X POST -H 'content-type: application/json' localhost:3050 -d '{"jsonrpc": "2.0", "id": 1, "method": "zks_getBlockDetails", "params": [0]}' -``` - -and then similarly for blocks number `1` and `2` by changing the parameters. - -The prover gateway will then fetch block number 1 to prove and start the entire proving pipeline, which starts out by -generating the witness, then passing it to the prover, then to the compressor to wrap it inside a SNARK to send to L1. 
- -You can follow the status of this pipeline by running - -```console -zk status prover -``` - -This might take a while (around an hour and a half on my machine using the CPU prover), you can check on it once in a -while. A successful flow should output something like - -```console -==== FRI Prover status ==== -State keeper: First batch: 0, recent batch: 1 -L1 state: block verified: 1, block committed: 1 -Verification key hash on contract is 0x4be443afd605a782b6e56d199df2460a025c81b3dea144e135bece83612563f2 -Verification key in database is 0x4be443afd605a782b6e56d199df2460a025c81b3dea144e135bece83612563f2 -Verifier hash matches. -Verifier params on contract are 0x5a3ef282b21e12fe1f4438e5bb158fc5060b160559c5158c6389d62d9fe3d080, 0x72167c43a46cf38875b267d67716edc4563861364a3c03ab7aee73498421e828, 0x0000000000000000000000000000000000000000000000000000000000000000 -Verification params match. -Next block that should be verified is: 2 -Checking status of the proofs... -Proof progress for 1 : 111 successful, 0 failed, 0 in progress, 0 queued. Compression job status: successful -``` - -The most important thing here is the following line - -```console -L1 state: block verified: 1, block committed: 1 -``` - -which means the proof for the block was verified on L1. - -## Performing circuit upgrade - -Performing circuit upgrade requires crypto library to be updated and generating new setup data, verification key, -finalization hints if the circuit changes. Below steps can be used to perform circuit upgrade: - -1. checkout if the circuit geometry has changed in the new version of the circuit by running the - [workflow](https://github.com/matter-labs/era-zkevm_test_harness/blob/v1.4.0/.github/workflows/.github/workflows/geometry-config-generator.yml) - in harness and merge the generated PR. -2. update the relevant crypto dependencies(boojum, zkevm_circuit, harness, etc) in `Cargo.lock`, for example: - `cargo update -p zkevm_test_harness@1.4.0` -3. 
prepare an PR with the updated dependencies [sample PR](https://github.com/matter-labs/zksync-2-dev/pull/2481). -4. Run the verification key - [workflow](https://github.com/matter-labs/zksync-era/actions/workflows/fri-vk-generator.yaml) against the PR to - generate the verification key and finalization hints for the new circuit. -5. Only once the above verification key workflow is successful, start the setup-data generation(CPU, GPU setup data - generation can be done in parallel), this step is important, since the setup data requires the new VK, we need to - wait for it to finish. -6. Run the CPU setup data generation - [workflow](https://github.com/matter-labs/zksync-era/actions/workflows/fri-setup-data-generator.yml) against the PR - to generate the CPU setup data. -7. Run the GPU setup data generation - [workflow](https://github.com/matter-labs/zksync-era/actions/workflows/fri-gpu-setup-data-generator.yml) against the - PR to generate the GPU setup data. -8. Once the setup data generation workflows are successful, update the PR with `setup_keys_id` id in - [build-docker-from-tag.yml](../../.github/workflows/build-docker-from-tag.yml) and in - [build-prover-fri-gpu-gar.yml](https://github.com/matter-labs/zksync-era/blob/main/.github/workflows/build-prover-fri-gpu-gar.yml), - make sure to only do it from `FRI prover` not old. - -## Quick Machine Setup for GPU proving on GCP - -``` -# As of 11th of March, 2024 - -# Go to GCP -> pick a project -> compute engine -> create instance -# Give the machine a name -# Go to GPUs and select Nvidia L4, g2-standard-32 (32 vCPUs, 16 core, 128 GB memory) -# Boot disk, select Ubuntu, Ubuntu 22.04 (x86), select SSD persistent disk and change size to 200GB - -# You should have the machine available, that you can SSH into. 
Assuming you're SSHed in from this point forward - -# Install Rust -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh - -# Install cMake from https://apt.kitware.com/ -- not listing steps as they may change -... - -# Install cuda -- again not listing steps as they may change -- https://developer.nvidia.com/cuda-downloads -- make sure to select Linux, x86_64, Ubuntu, 22.04, deb(network) and follow through -... - -# Make sure to make the nvidia software available -echo 'export PATH=/usr/local/cuda/bin${PATH:+:${PATH}}' >> ~/.bashrc - -# Reboot for the drivers to kick-in -sudo reboot - -# From here, you can follow-up the instructions from the main setup doc `core/docs/guides/setup-dev.md`; a TL;DR; is: - -# Install NVM -curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash - -# Install dependencies -sudo apt-get install -y build-essential pkg-config cmake clang lldb lld libssl-dev postgresql docker docker-compose-v2 axel - -# Make docker work -sudo usermod -aG docker YOUR_USER - -# Make sure you have all binaries loaded in your environment -source ~/.bashrc - -# Setup the node part -nvm install 18 -npm install -g yarn -yarn set version 1.22.19 - -# Install SQLX for database management -cargo install sqlx-cli --version 0.7.3 - -# Get solidity working -sudo add-apt-repository ppa:ethereum/ethereum -sudo apt-get update -sudo apt-get install solc - -# Make zk work -- insert below into ~/.bashrc -export ZKSYNC_HOME=/path/to/zksync - -export PATH=$ZKSYNC_HOME/bin:$PATH - -# Let's get the last bits of the environment in the desired state (stop postgres, as we use it in docker and start docker) -sudo systemctl stop postgresql -sudo systemctl disable postgresql -sudo systemctl start docker - -sudo reboot - -# Of course, let's get the code -git clone https://github.com/matter-labs/zksync-era.git - -# Load everything in the env and you're good to go -source ~/.bashrc && cd ~/zksync-era -``` +GPU circuit proving is the only maintained 
implementation. CPU circuit proving has been deprecated. diff --git a/prover/docs/00_intro.md b/prover/docs/00_intro.md new file mode 100644 index 000000000000..fb79cf5bed0e --- /dev/null +++ b/prover/docs/00_intro.md @@ -0,0 +1,80 @@ +# Prover subsystem introduction + +The prover subsystem consists of several binaries that perform different steps of the batch proof generation process, as +follows: + +- [Prover gateway][pg]: interface between core and prover subsystems, fetches batch jobs from core, and sends batch + proofs back to core. +- [Witness generator][wg]: component that takes batch information (tx execution/state diffs/computation results) and + constructs witness for proof generation. +- [Witness vector generator][wvg]: component that uses witness generator output and computes witness vector (_roughly_: + data to be fed into GPU) for circuit provers. +- [Circuit prover][p]: component that generates a circuit proof (GPU accelerated). +- [Proof compressor][pc]: component that "wraps" the generated proof so that it can be sent to L1 (GPU accelerated). + +While not technically a part of the prover workspace, the following components are essential for it: + +- [Proof data handler][pdh]: API on the core side which Prover gateway interacts with. +- [House keeper][hk]: Metrics exporter and job rescheduler. In it's absence, jobs would not be rescheduled and metrics + used for autoscaling would not exist, rendering internal autoscaling infrastructure useless. + +Finally, the prover workspace has several CLI tools: + +- [Circuit key generator][vkg]: CLI used to generate keys required for proving. +- [Prover CLI][pcli]: CLI for observing and maintaining the production proving infrastructure. + +There are core components that also participate in the proof generation process by preparing the input data, such as +[metadata calculator][mc], [commitment generator][cg], [basic witness input producer][bwip], and [protective reads +writer][prw]. 
We won't cover them much in these docs, but it's better to know that they exist and are important for the +prover subsystem as well. + +We'll cover how the components work further in documentation. + +[pg]: ../crates/bin/prover_fri_gateway/ +[wg]: ../crates/bin/witness_generator/ +[wvg]: ../crates/bin/witness_vector_generator/ +[p]: ../crates/bin/prover_fri/ +[pc]: ../crates/bin/proof_fri_compressor/ +[pdh]: ../../core/node/proof_data_handler/ +[hk]: ../../core/node/house_keeper/ +[vkg]: ../crates/bin/vk_setup_data_generator_server_fri/ +[pcli]: ../crates/bin/prover_cli/ +[mc]: ../../core/node/metadata_calculator/ +[cg]: ../../core/node/commitment_generator/ +[bwip]: ../../core/node/vm_runner/src/impls/bwip.rs +[prw]: ../../core/node/vm_runner/src/impls/protective_reads.rs + +## How it runs + +Proof generation is a multi-stage process, where the initial jobs are created by the Prover gateway, and then moved by +the House Keeper until the proof is generated. + +The real-life deployment of prover subsystem looks as follows: + +- 1x prover gateway +- 1x house keeper +- Many witness generators +- Many witness vector generators +- Many circuit provers +- 1+ proof compressors + +Currently, the proving subsystem is designed to run in GCP. In theory, it's mostly environment-agnostic, and all of the +components can be launched locally, but more work is needed to run a production system in a distributed mode outside of +GCP. + +Witness generators, witness vector generators, and provers are spawned on demand based on the current system load via an +autoscaler (WIP, so not released publicly yet). They can be spawned in multiple clusters among different zones, based on +the availability of machines with required specs. + +## How to develop + +Different parts of the subsystem have different hardware requirements, but the aggregated summary to be able to run +everything on a single machine is as follows: + +- CPU with 16+ physical cores.
+- GPU with CUDA support and at least 24 GB of VRAM. +- At least 64GB of RAM. +- 200+ GB of disk space. 400+ GB is recommended for development, as `/target` directory can get quite large. + +Given that the requirements are quite high, it's often more convenient developing the prover in a GCP VM rather than on +a local machine. Setting up a VM is covered further in docs. diff --git a/prover/docs/01_gcp_vm.md b/prover/docs/01_gcp_vm.md new file mode 100644 index 000000000000..a541495e978a --- /dev/null +++ b/prover/docs/01_gcp_vm.md @@ -0,0 +1,147 @@ +# Creating a GCP VM + +In this section we will cover the creation of a VM suitable for prover development. We assume that you already have +access to the GCP cluster. + +## When you need a VM + +Generally, you don't always need a VM to work on prover. You typically need it to either modify the code under +`cfg(feature = "gpu")` flag, or when you need to run some tests. Moreover, VMs are shared, e.g. many people have access +to them, and you can't store sensitive data (like SSH keys) there, so they can't be used as primary workstations. +Finally, the VMs with GPU aren't cheap, so we expect you to use them when you really need them. + +A typical workflow so far is to instantiate a new VM when you need it, and remove once you're done. Remember: even if +the VM is stopped, the SSD is persisted, so it's not free. + +## Create a VM + +Open [Google cloud console](https://console.cloud.google.com/) and choose "Compute Engine". + +On the "Compute Engine" page choose the cluster suitable for creating VMs with GPU, and then click on "Create instance". + +We will need an GPU **L4** instance, so find the zone that is close to you geographically and has such instances. At the +time of writing, `europe-west2` is one of the possible options. L4 is recommended as the cheapest option, but you may +use a beefier machine if you need it. 
+ +When you choose the region, set the following options: + +- Name: A descriptive name that contains your name, e.g. `john-doe-prover-dev-machine`. +- Region and zone: Values you've found above. +- Machine configuration: "GPUs", then: + - GPU Type: NVIDIA L4 + - Number of GPUs: 1 + - Machine type: Preset, `g2-standard-16` +- Availability policies: Spot instances are much cheaper, but there is a chance that it will be preempted while you work + on it. If you're working on something that is not very important, spot instances are recommended. If any disruption + will be harmful, choose standard provisioning. +- Then click on "VM provisioning model advanced settings" and + - Click on "Set a time limit for the VM" + - Set the limit to 8 hours +- On VM termination: Stop +- Boot disk: Click on "Change", then: + - Operating system: Ubuntu + - Version: Ubuntu 22.04 LTS (x86/64) + - Boot disk type: SSD persistent disk + - Size: 300GB + +Leave the remaining options as is and click on "Create". + +You will have to wait a bit and then your instance will be created. Once you see that the machine is running, click on +an arrow near "SSH" in the list of options, and choose "Open in browser window". + +You should successfully connect to your machine now. + +⚠️ Don't forget to remove the VM once you've finished your scope of work. It's OK to keep the machine if you expect to +work with it on the next working day, but otherwise it's better to remove and create a new one when needed. + +## Adding your own ssh key (on local machine) + +Using browser to connect to the machine may not be the most convenient option. Instead, we can add an SSH key to be able +to connect there. + +It is highly recommended to generate a new SSH key specifically for this VM, for example: + +``` +ssh-keygen -t rsa -f ~/.ssh/gcp_vm -C <your work email> -b 2048 +``` + +...where "your work email" is the same email you use to access GCP.
+ +Check the contents of the public key: + +``` +cat ~/.ssh/gcp_vm.pub +``` + +Click on your machine name, then click on "Edit". Scroll down until you see "SSH Keys" section and add the generated +public key there. Then save. + +Get back to the list of VMs and find the external IP of your VM. Now you should be able to connect to the VM via ssh. +Assuming that your work email is `abc@example.com` and the external IP is 35.35.35.35: + +``` +ssh -i ~/.ssh/gcp_vm abc@35.35.35.35 +``` + +## Make the VM cozy + +If you intend to use the VM somewhat regularly, install all the tools you would normally install on your own machine, +like `zsh` and `nvim`. + +It is also _highly recommended_ to install `tmux`, as you will have to run multiple binaries and observe their output. +If you don't know what it is or why you should care, watch [this video](https://www.youtube.com/watch?v=DzNmUNvnB04). + +Native `tmux` may be hard to use, so you may also want to install some configuration for it, e.g. + +- [oh-my-tmux](https://github.com/gpakosz/.tmux) or +- [tmux-sensible](https://github.com/tmux-plugins/tmux-sensible). + +Finally, it is recommended to choose a different terminal theme or prompt than what you use locally, so that you can +easily see whether you're running in the VM or locally. + +## Connecting via VS Code + +VS Code can connect to VMs via SSH, so you can have the comfort of using your own IDE while still running everything on +a remote machine. + +If you're using WSL, note that VS Code will have to look up the keys in Windows, so you will have to copy your keys +there as well, e.g.: + +``` +cp ~/.ssh/gcp_vm* /mnt/c/Users/User/.ssh +``` + +Then, when you open a fresh VS Code window, in the "Start" section: + +- Choose "Connect to Host" +- Click on "Configure Hosts" +- Create a host entry. + +Host entry looks as follows: + +``` +Host <name> + HostName <external IP> + IdentityFile <path to private key> + User <username> +``` + +E.g.
for the command we've used as an example before: `ssh -i ~/.ssh/gcp_vm abc@35.35.35.35`, the file will be: + +``` +Host gcp_vm + HostName 35.35.35.35 + IdentityFile ~/.ssh/gcp_vm + User abc +``` + +Once you've configured the host, you can click on "Connect to" again, then "Connect to Host", and your VM should be +listed there. On the first connect you'll have to confirm that you want to connect to it, and then choose the operating +system (Linux). + +## On security + +Do not store SSH keys, tokens, or other private information on GCP VMs. Do not use SSH keys forwarding either. These VMs +are shared, and every person has root access to all the VMs by default. + +You may, however, use tools like `rsync` or `sshfs`. diff --git a/prover/docs/02_setup.md b/prover/docs/02_setup.md new file mode 100644 index 000000000000..67c2b0b945ff --- /dev/null +++ b/prover/docs/02_setup.md @@ -0,0 +1,58 @@ +# Development environment setup + +In this section, we cover installing prerequisites for running prover subsystem. We assume that you have a prepared +machine in place, e.g. a compatible local machine or a prepared GCP VM. + +## ZKsync repo setup + +If you haven't already, you need to initialize the ZKsync repository first. Follow +[this guide](../../docs/guides/setup-dev.md) for that. + +Before proceeding, make sure that you can run the server and integration tests pass. + +## Prover-specific prerequisites + +### Cmake 3.24 or higher + +Use [Kitware APT repository](https://apt.kitware.com/). + +### CUDA runtime + +If you're using a local machine, make sure that you have up-to-date GPU driver. + +Use [Official CUDA downloads](https://developer.nvidia.com/cuda-downloads). + +Choose: OS -> Linux -> x86_64 -> Ubuntu (For WSL2 choose WSL-Ubuntu) -> 22.04 -> deb (network). + +Install both the base and driver (kernel module flavor). 
+ +Setup environment variables: add the following to your configuration file (`.bashrc`/`.zshrc`): + +``` +# CUDA +export CUDA_HOME=/usr/local/cuda +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64 +export PATH=$PATH:$CUDA_HOME/bin +``` + +Reboot for the drivers to kick-in. + +### Bellman-CUDA + +Bellman-CUDA is a library required for GPU proof compressor. + +Navigate to some directory where you want to store the code, and then do the following: + +``` +git clone git@github.com:matter-labs/era-bellman-cuda.git +cmake -Bera-bellman-cuda/build -Sera-bellman-cuda/ -DCMAKE_BUILD_TYPE=Release +cmake --build era-bellman-cuda/build/ +``` + +After that add the following environment variable to your config (`.bashrc`/`.zshrc`): + +``` +export BELLMAN_CUDA_DIR=/era-bellman-cuda +``` + +Don't forget to reload it (e.g. `source ~/.zshrc`). diff --git a/prover/docs/03_launch.md b/prover/docs/03_launch.md new file mode 100644 index 000000000000..2c5809e994e5 --- /dev/null +++ b/prover/docs/03_launch.md @@ -0,0 +1,101 @@ +# Running provers + +## Preparing + +First, run the following command: + +``` +zk env prover-local +``` + +It will create a config similar to `dev`, but with: + +- Proof sending mode set to `OnlyRealProofs` +- Prover mode set to `Local` instead of `GCS`. + +You can always switch back to dev config via `zk env dev`. + +## Enter the prover workspace + +All the commands for binaries in the prover workspace must be done from the prover folder: + +``` +cd $ZKSYNC_HOME/prover +``` + +## Key generation + +This operation should only be done once; if you already generated keys, you can skip it. + +The following command will generate the required keys: + +``` +zk f cargo run --features gpu --release --bin key_generator -- generate-sk-gpu all --recompute-if-missing +``` + +With that, you should be ready to run the prover. + +## Running + +Important! 
Generating a proof takes a lot of time, so if you just want to see whether you can generate a proof, do it +against clean sequencer state (e.g. right after `zk init`). + +We will be running a bunch of binaries, it's recommended to run each in a separate terminal. + +### Server + +``` +zk server --components=api,tree,eth,state_keeper,housekeeper,tee_verifier_input_producer,commitment_generator,da_dispatcher,proof_data_handler,vm_runner_protective_reads,vm_runner_bwip +``` + +### Proof data handler + +``` +zk f cargo run --release --bin zksync_prover_fri_gateway +``` + +Then wait until the first job is picked up. Prover gateway has to insert protocol information into the database, and +until it happens, witness generators will panic and won't be able to start. + +### Witness generator + +Once a job is created, start witness generators: + +``` +API_PROMETHEUS_LISTENER_PORT=3116 zk f cargo run --release --bin zksync_witness_generator -- --all_rounds +``` + +`--all_rounds` means that witness generator will produce witnesses of all kinds. You can run a witness generator for +each round separately, but it's mostly useful in production environments. + +### Witness vector generator + +``` +FRI_WITNESS_VECTOR_GENERATOR_PROMETHEUS_LISTENER_PORT=3420 zk f cargo run --release --bin zksync_witness_vector_generator +``` + +WVG prepares inputs for prover, and it's a single-threaded time-consuming operation. You may run several instances (make +sure to use different ports). The exact amount of WVGs needed to "feed" one prover depends on CPU/GPU specs, but a +ballpark estimate (useful for local development) is 10 WVGs per prover. + +### Prover + +``` +zk f cargo run --features "gpu" --release --bin zksync_prover_fri +``` + +Prover can prove any kinds of circuits, so you only need a single instance. + +### Proof compressor + +⚠️ Both prover and proof compressor require 24GB of VRAM, and currently it's not possible to make them use different +GPU. 
So unless you have a GPU with 48GB of VRAM, you won't be able to run both at the same time. + +You should wait until the proof is generated, and once you see in the server logs that it tries to find available +compressor, you can shut the prover down, and run the proof compressor: + +``` +zk f cargo run --features "gpu" --release --bin zksync_proof_fri_compressor +``` + +Once the proof is compressed, proof gateway will see that and will send the generated proof back to core. diff --git a/prover/docs/04_flow.md b/prover/docs/04_flow.md new file mode 100644 index 000000000000..9bb5ebacbc40 --- /dev/null +++ b/prover/docs/04_flow.md @@ -0,0 +1,238 @@ +# Prover flow + +In this section, we're going to learn what stages does the proof generation process have. It's a complex process, so +we'll be looking at it from four perspectives: + +- Core<->Prover subsystem interactions. +- Core side of workflow. +- Prover pipeline. +- Batch proof generation. +- Infrastructure distribution. + +After that, we will touch on how this flow is mapped on the actual production infrastructure. + +## Core <-> Prover subsystem interactions + +Core and prover subsystem are built in such a way that they are mostly isolated from each other. Each side has its own +database and GCS buckets, and both have "gateway" components they use for interaction. + +The only exception here is the `house_keeper`: it's a component that exists as a part of the server, it's main purpose +is to manage jobs (and emit metrics for job management) in the prover workspace, but at the same time it has access to +both core and prover databases. The component will probably be split in the future and most of it will be moved to the +prover workspace. 
+ +Otherwise, the interaction between subsystems can be expressed as follows: + +```mermaid +sequenceDiagram + participant C as Core + participant P as Prover + + loop In parallel, for each batch + P-->>+C: Get a job to prove + C->>-P: Unproven batch + P->>P: Calculate proof + P->>C: Submit proof + end +``` + +Core exposes an API, and Prover repeatedly polls this API, fetching new batch proof jobs and submitting batch proofs. + +## Core side of workflow + +Despite the fact that the prover is isolated from the core, the core has multiple components specifically designed to +prepare _inputs_ for proving. + +The following diagram shows what happens under the hood when the prover subsystem requests a new job: + +```mermaid +sequenceDiagram + box Core + participant Ob as GCS + participant DB as Core database + participant API as Proof data handler + end + participant P as Prover + P-->>+API: Get a job + API-->>DB: Lock a suitable job + DB->>API: Job is marked as "picked_up" + API-->>Ob: Fetch BWIP data + Ob->>API: Return BWIP data + API-->>Ob: Fetch Merkle Tree data + Ob->>API: Return Merkle Tree data + API-->>DB: Fetch batch metadata + DB->>API: Return batch metadata + API->>-P: Return a job +``` + +First of all, `proof_data_handler` will check if all the data required for the proof generation is already prepared by +the core. If so, it will lock the job so that it's not assigned twice, and will fetch required information from multiple +sources. Then this data is given to the prover together with the batch number. + +## Prover pipeline + +Once job is received by the prover, it has to go through several different stages. Consider this a mental model of the +pipeline, since in reality some stages happen in parallel, and some have different degree of sequencing. 
+ +```mermaid +sequenceDiagram +participant C as Core +box Prover +participant PG as Gateway +participant BPG as Basic WG+Proving +participant LPG as Leaf WG+Proving +participant NPG as Node WG+Proving +participant RTPG as Recursion tip WG+Proving +participant SPG as Scheduler WG+Proving +participant CP as Compressor +end +C-->>PG: Job +PG->>BPG: Batch data +BPG->>LPG: Basic proofs +LPG->>NPG: Aggregated proofs (round 1) +NPG->>NPG: Internal aggregation to get 1 proof per circuit type +NPG->>RTPG: Aggregated proofs (round 2) +RTPG->>SPG: Aggregated proofs (round 3) +SPG->>CP: Aggregated proof (round 4) +CP->>PG: SNARK proof +PG-->>C: Proof +``` + +When we process the initial job (during basic witness generation) we create many sub-jobs for basic proof generation. +Once they are processed, we start to aggregate generated proofs, and we do it in "levels". With each aggregation level, +we reduce the number of jobs. + +Aggregation levels are commonly referred by numbers in the prover workspace, from 0 to 4. So if someone mentions +"aggregation round 2", they refer to the "node" stage, and round 4 corresponds to the "scheduler" stage. Proof +compression is considered separate operation, and doesn't have a numeric value. + +Jobs within the aggregation round may also have different types, but this will be covered later. + +The actual numbers may vary, but just for example there might exist a batch, so that it initially creates 10000 jobs, +which are processed as follows: + +- On round 0, we also emit 10000 jobs. We aren't doing "actual" aggregation here. +- On round 1, we're turning 10000 jobs into 100. +- On round 2, we should turn these 100 jobs into at most 16. Depending on the batch parameters, it may required + additional "iterations" of the stage. For example, after we processed the initial 100 jobs, we may get 35 proofs. + Then, additional node level jobs will be created, until we reduce the number to at most 16. +- On round 3, we're turning 16 jobs into 1. 
+- On round 4, we already have just 1 job, and we produce a single aggregated proof. +- Finally, the proof is processed by the proof compressor and sent back to the core. + +Once again, these numbers are just for example, and don't necessarily represent the actual state of affairs. The exact +number of jobs depend on number of txs in a batch (and what's done inside those txs) while the aggregation split +(mapping of `N circuits of level X` to `M circuits of level X + 1`) is determined by the config geometry. + +## Actual proof generation + +Every "job" we mentioned has several sub-stages. More precisely, it receives some kind of input, which is followed by +witness generation, witness vector generation, and circuit proving. The output of circuit proving is passed as an input +for the next "job" in the pipeline. + +For each aggregation level mentioned above the steps are the same, though the inputs and outputs are different. + +```mermaid +sequenceDiagram +participant Ob as Prover GCS +participant DB as Prover DB +participant WG as Witness Generator +participant WVG as Witness Vector Generator +participant P as Prover +WG-->>DB: Get WG job +DB->>WG: Job +WG-->>Ob: Get job data +Ob->>WG: Data for witness generation +WG->>WG: Build witness +WG->>Ob: Save witness +WG->>DB: Create prover job +WVG-->>DB: Get prover job +DB->>WVG: Prover job +WVG->>WVG: Build witness vector +WVG-->>DB: Lock a free prover +DB->>WVG: Prover address +WVG->>P: Submit witness vector over TCP +P->>P: Generate a proof +P->>Ob: Store proof +P->>DB: Mark proof as stored +``` + +## Circuits + +Finally, even within the same level, there may be different circuit types. Under the hood, they prove the correctness of +different parts of computations. From a purely applied point of view, it mostly means that initially we receive X jobs +of N types, which cause Y jobs of M types, and so on. + +So, in addition to the aggregation layer, we also have a circuit ID. 
A tuple of aggregation round and circuit ID form an +unique job identifier, which allows us to understand which inputs we should receive, what processing logic we should +run, and which outputs we should produce. + +As of Jul 2024, we have 35 circuit types mapped to 5 aggregation layers. + +_Note:_ specifics of each circuit type and aggregation layers are out of scope for this document, but you can find more +information on that in the [further reading](99_further_reading.md) section. + +## Prover groups + +The next problem you would meet once you start proving in production environment is that different +`(aggregation_round, circuit_id)` pairs have different load. For some, you need a lot of machines, while for some a few +is enough. + +To help with that, we spread the machines into 15 different groups, based on how "busy" they are, and configure each +group to work with a specific set of `(aggregation_round, circuit_id)` pairs only. + +Here you can see +[an example mapping](https://github.com/matter-labs/zksync-era/blob/3fbbee10be99e8c5a696bfd50d81230141bccbf4/etc/env/base/fri_prover_group.toml). + +Whenever you launch a witness generator, witness vector generator, or prover, it will check the group it belongs to, and +will only work with pairs configured for that group. + +If a non-existent group is chosen, all of the pairs will be processed by default. + +## Regions + +Since the number of jobs is high, a cluster in a single region may not have enough machines to process them in a timely +manner. Because of that, our prover infrastructure is designed to work across multiple clusters in different GCP +regions. + +It mostly doesn't affect the code, since we use Postgres and GCS for communication, with one major exception: since WVG +streams data directly to GPU provers via TCP, it will only look for prover machines that are registered in the same zone +as WVG in order to reduce network transfers (inter-AZ costs less than intra-AZ or even cross DC). 
+ +## Protocol versions + +Finally, ZKsync has protocol versions, and it has upgrades from time to time. Each protocol version upgrade is defined +on L1, and the version follows SemVer convention, e.g. each version is defined as `0.x.y`. During the protocol version +upgrade, one of three things can change: + +- Protocol _behavior_. For example, we add new functionality and our VM starts working differently. +- Circuits _implementation_. For example, VM behavior doesn't change, but we add more constraints to the circuits. +- Contracts changes. For example, we add a new method to the contract, which doesn't affect neither VM or circuits. + +For the first two cases, there will be changes in circuits, and there will be new verification keys. It means, that the +proving process will be different. The latter has no implications for L2 behavior. + +As a result, after upgrade, we may need to generate different proofs. But given that upgrades happen asynchronously, we +cannot guarantee that all the "old" batched will be proven at the time of upgrade. + +Because of that, prover is protocol version aware. Each binary that participates in proving is designed to only generate +proofs for a single protocol version. Once the upgrade happens, "old" provers continue working on the "old" unproven +batches, and simultaneously we start spawning "new" provers for the batches generated with the new protocol version. +Once all the "old" batches are proven, no "old" provers will be spawned anymore. + +## Recap + +That's a quite sophisticated infrastructure, and it may be hard to understand it in one go. Here's a quick recap of this +page: + +- Main components of the prover subsystem are house keeper, prover gateway, witness generator, witness vector generator, + GPU prover, and proof compressor. +- House keeper and prover gateway don't perform any significant computations, and there is just one instance of each. 
+- Witness generator, witness vector generator, and GPU prover work together as a "sub-pipeline". +- As of Jul 2024, the pipeline consists of 5 aggregation rounds, which are further split into 35 + `(aggregation_round, circuit_id)` pairs, followed by the proof compression. +- On the infrastructure level, these 35 pairs are spread across 15 different prover groups, according to how "busy" the + group is. +- Groups may exist in different clusters in different GCP regions. +- Provers are versioned according to the L1 protocol version. There may be provers with different versions running at + the same time. diff --git a/prover/docs/99_further_reading.md b/prover/docs/99_further_reading.md new file mode 100644 index 000000000000..64487a715d57 --- /dev/null +++ b/prover/docs/99_further_reading.md @@ -0,0 +1,13 @@ +# Further reading + +The documentation in this section aimed to provide a practical overview of the prover workspace, e.g. help people to +understand how to run provers and what they do. + +However, we have some documentation that is more focused on theory of proving in the [core workspace docs](../../docs/). + +You may find the following articles helpful for general understanding of ZK proofs: + +- [ZK intuition](../../docs/guides/advanced/13_zk_intuition.md). +- [ZK deeper overview](../../docs/guides/advanced/14_zk_deeper_overview.md). +- [Prover keys](../../docs/guides/advanced/15_prover_keys.md). +- [Overview of our ZK proving system implementation](../../docs/specs/prover/). diff --git a/prover/docs/README.md b/prover/docs/README.md new file mode 100644 index 000000000000..62f3fc8d1c4c --- /dev/null +++ b/prover/docs/README.md @@ -0,0 +1,16 @@ +# Prover subsystem documentation + +This is technical documentation for the prover subsystem.It aims to help developers to set up a development environment +for working with provers. This documentation assumes that you are already familiar with how ZKsync works, and you need +to be able to work with the prover code. 
+ +It does not cover topics such as basics of ZK or production deployment for provers. + +## Table of contents + +- [Intro](00_intro.md) +- [Setting up a GCP VM](01_gcp_vm.md) +- [Workspace setup](02_setup.md) +- [Running prover subsystem](03_launch.md) +- [Proof generation flow](04_flow.md) +- [Further reading](99_further_reading.md) diff --git a/prover/setup.sh b/prover/setup.sh deleted file mode 100755 index 2d546c1f8bd6..000000000000 --- a/prover/setup.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# This script sets up the necessary data needed by the CPU/GPU FRI prover to be used locally. - -GPU_FLAG="" -GENERATE_SK_COMMAND="generate-sk" -if [ "$1" = "gpu" ]; then - GPU_FLAG='--features gpu' - GENERATE_SK_COMMAND="generate-sk-gpu" -fi - -if [[ -z "${ZKSYNC_HOME}" ]]; then - echo "Environment variable ZKSYNC_HOME is not set. Make sure it's set and pointing to the root of this repository" - exit 1 -fi - -sed -i.backup 's/^proof_sending_mode=.*$/proof_sending_mode="OnlyRealProofs"/' ../etc/env/base/eth_sender.toml -rm ../etc/env/base/eth_sender.toml.backup -sed -i.backup 's/^setup_data_path=.*$/setup_data_path="vk_setup_data_generator_server_fri\/data\/"/' ../etc/env/base/fri_prover.toml -rm ../etc/env/base/fri_prover.toml.backup -sed -i.backup 's/^universal_setup_path=.*$/universal_setup_path="..\/keys\/setup\/setup_2^26.key"/' ../etc/env/base/fri_proof_compressor.toml -rm ../etc/env/base/fri_proof_compressor.toml.backup - -zk config compile dev - -# Update setup keys (only if they are not present) -zk f cargo run $GPU_FLAG --release --bin key_generator -- $GENERATE_SK_COMMAND all --recompute-if-missing From e4670136e18b69aace779fec9dea81760de48585 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mat=C3=ADas=20Ignacio=20Gonz=C3=A1lez?= Date: Fri, 26 Jul 2024 10:02:36 +0200 Subject: [PATCH 39/52] docs(zk_toolbox): Update zk toolbox docs (#2439) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Update zk toolbox 
docs: * Prover * Contract verifier --- zk_toolbox/README.md | 60 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/zk_toolbox/README.md b/zk_toolbox/README.md index aed5fc15cbc9..d97d05f1459a 100644 --- a/zk_toolbox/README.md +++ b/zk_toolbox/README.md @@ -106,3 +106,63 @@ zk_inception server ``` You can specify the chain you are running by providing `--chain ` argument + +### Prover + +#### Requirements + +Make sure you have installed the following requirements: + +- [gcloud](https://cloud.google.com/sdk/docs/install) +- [wget](https://www.gnu.org/software/wget/) +- [cmake](https://apt.kitware.com/) +- [nvcc (CUDA toolkit)](https://developer.nvidia.com/cuda-downloads) + +Checkout [prover docs](https://github.com/matter-labs/zksync-era/blob/main/prover/docs/02_setup.md) for more info. + +#### Running the prover + +To run the prover, follow these steps: + +First, initialize the prover: + +```bash +zk_inception prover init # initializes object store settings, downloads setup keys and initializes bellman-cuda +``` + +You can generate the setup keys with: + +```bash +zk_inception prover generate-sk +``` + +Finally, run the prover: + +```bash +zk_inception prover run +``` + +You can specify the prover component to run by providing `--component ` argument. Possible components are: +`gateway, witness-generator, witness-vector-generator, prover, compressor` + +If you are running `witness-generator` you can specify the round by providing `--round ` argument. Possible +rounds are: `all-rounds, basic-circuits, leaf-aggregation, node-aggregation, recursion-tip, scheduler` + +### Contract verifier + +Running the contract verifier: + +First, download the required `solc`, `zksolc`, `vyper` and `zkvyper` binaries with: + +```bash +zk_inception contract-verifier init` +``` + +Select the minimum version of each compiler, and the tool will download any missing binaries. 
These binaries are +necessary to compile contracts, as each contract may require a different compiler version. + +Then, run the contract verifier with: + +```bash +zk_inception contract-verifier run +``` From c0815cdaf878afcd9c41dddd9fe56bcf8d910633 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 12:07:39 +0400 Subject: [PATCH 40/52] feat(vlog): New vlog interface + opentelemtry improvements (#2472) OK, that's a big PR for its purpose, sorry for that. Essence of changes: ## Nice observability installation from the config We can now write the following: ``` let observability_config = configs .observability .clone() .context("observability config")?; let _observability_guard = observability_config.install()?; ``` instead of having to manually map data from `observability_config` to `ObservabilityBuilder` methods. This is done in a non-intrusive way: the functionality is under a feature flag in `zksync_config` which is only enabled for binaries. ## Nicer `ObservabilityBuilder` interface Previously we've had "everything in one place", now we have separate structures for logs, opentelemetry, and sentry. It would be nice to integrate prometheus there too, but it's a separate task. ## More service information is exposed via opentelemetry Previously only service name was exposed. Now we also expose pod name and namespace. It's done in a non-intrusive way as well, where you can provide _some_ of information if you have it, but otherwise it'll be populated from env variables (which is fine, because it's not a permanent configuration, but rather a machine descriptor). 
--- Cargo.lock | 360 ++++++++------- Cargo.toml | 10 +- core/bin/block_reverter/Cargo.toml | 2 +- core/bin/block_reverter/src/main.rs | 26 +- core/bin/contract-verifier/Cargo.toml | 2 +- core/bin/contract-verifier/src/main.rs | 19 +- .../external_node/src/config/observability.rs | 36 +- core/bin/external_node/src/config/tests.rs | 6 +- core/bin/external_node/src/main.rs | 4 +- core/bin/external_node/src/node_builder.rs | 10 +- core/bin/external_node/src/tests/mod.rs | 8 +- .../Cargo.toml | 2 +- .../src/main.rs | 13 +- core/bin/snapshots_creator/Cargo.toml | 2 +- core/bin/snapshots_creator/src/main.rs | 14 +- core/bin/zksync_server/Cargo.toml | 2 +- core/bin/zksync_server/src/main.rs | 42 +- core/bin/zksync_server/src/node_builder.rs | 16 +- core/bin/zksync_tee_prover/Cargo.toml | 2 +- core/bin/zksync_tee_prover/src/main.rs | 21 +- core/lib/config/Cargo.toml | 5 + core/lib/config/src/configs/observability.rs | 2 +- core/lib/config/src/lib.rs | 3 + core/lib/config/src/observability_ext.rs | 52 +++ core/lib/utils/src/wait_for_tasks.rs | 10 +- core/lib/vlog/Cargo.toml | 6 +- core/lib/vlog/src/lib.rs | 376 ++-------------- core/lib/vlog/src/logs/layer.rs | 140 ++++++ core/lib/vlog/src/logs/mod.rs | 163 +++++++ core/lib/vlog/src/opentelemetry/mod.rs | 191 ++++++++ core/lib/vlog/src/sentry.rs | 38 ++ .../node/node_framework/examples/main_node.rs | 419 ------------------ core/node/node_framework/examples/showcase.rs | 10 +- core/node/node_framework/src/service/mod.rs | 50 ++- core/node/node_framework/src/service/tests.rs | 20 +- core/tests/loadnext/src/main.rs | 25 +- etc/env/file_based/general.yaml | 17 +- prover/Cargo.lock | 216 ++++----- .../bin/proof_fri_compressor/Cargo.toml | 1 + .../bin/proof_fri_compressor/src/main.rs | 23 +- prover/crates/bin/prover_fri/Cargo.toml | 2 +- prover/crates/bin/prover_fri/src/main.rs | 31 +- .../crates/bin/prover_fri_gateway/Cargo.toml | 2 +- .../crates/bin/prover_fri_gateway/src/main.rs | 15 +- 
.../crates/bin/witness_generator/Cargo.toml | 2 +- .../crates/bin/witness_generator/src/main.rs | 30 +- .../bin/witness_vector_generator/Cargo.toml | 2 +- .../bin/witness_vector_generator/src/main.rs | 23 +- zk_toolbox/Cargo.lock | 243 +++++----- 49 files changed, 1204 insertions(+), 1510 deletions(-) create mode 100644 core/lib/config/src/observability_ext.rs create mode 100644 core/lib/vlog/src/logs/layer.rs create mode 100644 core/lib/vlog/src/logs/mod.rs create mode 100644 core/lib/vlog/src/opentelemetry/mod.rs create mode 100644 core/lib/vlog/src/sentry.rs delete mode 100644 core/node/node_framework/examples/main_node.rs diff --git a/Cargo.lock b/Cargo.lock index a2cf9e4fde0c..3cdfe491c993 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -342,34 +342,6 @@ dependencies = [ "paste", ] -[[package]] -name = "axum" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" -dependencies = [ - "async-trait", - "axum-core 0.3.4", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http 0.2.9", - "http-body 0.4.6", - "hyper 0.14.29", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "sync_wrapper 0.1.2", - "tower", - "tower-layer", - "tower-service", -] - [[package]] name = "axum" version = "0.7.5" @@ -377,7 +349,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", - "axum-core 0.4.3", + "axum-core", "bytes", "futures-util", "http 1.1.0", @@ -404,23 +376,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 0.2.9", - "http-body 
0.4.6", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - [[package]] name = "axum-core" version = "0.4.3" @@ -2763,9 +2718,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -2944,14 +2899,15 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" dependencies = [ - "hyper 0.14.29", + "hyper 1.3.1", + "hyper-util", "pin-project-lite", "tokio", - "tokio-io-timeout", + "tower-service", ] [[package]] @@ -3810,13 +3766,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" dependencies = [ + "hermit-abi 0.3.9", "libc", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3961,6 +3918,12 @@ dependencies = [ "serde", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-derive" version = "0.2.5" @@ -4030,7 +3993,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.3", + "hermit-abi 0.3.9", "libc", 
] @@ -4149,43 +4112,46 @@ dependencies = [ [[package]] name = "opentelemetry" -version = "0.20.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9591d937bc0e6d2feb6f71a559540ab300ea49955229c347a517a28d27784c54" +checksum = "4c365a63eec4f55b7efeceb724f1336f26a9cf3427b70e59e2cd2a5b947fba96" dependencies = [ - "opentelemetry_api", - "opentelemetry_sdk", + "futures-core", + "futures-sink", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", ] [[package]] name = "opentelemetry-http" -version = "0.9.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" +checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab" dependencies = [ "async-trait", "bytes", - "http 0.2.9", - "opentelemetry_api", - "reqwest 0.11.22", + "http 1.1.0", + "opentelemetry", + "reqwest 0.12.5", ] [[package]] name = "opentelemetry-otlp" -version = "0.13.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" +checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727" dependencies = [ "async-trait", "futures-core", - "http 0.2.9", + "http 1.1.0", + "opentelemetry", "opentelemetry-http", "opentelemetry-proto", - "opentelemetry-semantic-conventions", - "opentelemetry_api", "opentelemetry_sdk", - "prost 0.11.9", - "reqwest 0.11.22", + "prost 0.13.1", + "reqwest 0.12.5", "thiserror", "tokio", "tonic", @@ -4193,58 +4159,37 @@ dependencies = [ [[package]] name = "opentelemetry-proto" -version = "0.3.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1e3f814aa9f8c905d0ee4bde026afd3b2577a97c10e1699912e3e44f0c4cbeb" +checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9" dependencies = [ - "opentelemetry_api", + 
"opentelemetry", "opentelemetry_sdk", - "prost 0.11.9", + "prost 0.13.1", "tonic", ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73c9f9340ad135068800e7f1b24e9e09ed9e7143f5bf8518ded3d3ec69789269" -dependencies = [ - "opentelemetry", -] - -[[package]] -name = "opentelemetry_api" -version = "0.20.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a81f725323db1b1206ca3da8bb19874bbd3f57c3bcd59471bfb04525b265b9b" -dependencies = [ - "futures-channel", - "futures-util", - "indexmap 1.9.3", - "js-sys", - "once_cell", - "pin-project-lite", - "thiserror", - "urlencoding", -] +checksum = "1cefe0543875379e47eb5f1e68ff83f45cc41366a92dfd0d073d513bf68e9a05" [[package]] name = "opentelemetry_sdk" -version = "0.20.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8e705a0612d48139799fcbaba0d4a90f06277153e43dd2bdc16c6f0edd8026" +checksum = "692eac490ec80f24a17828d49b40b60f5aeaccdfe6a503f939713afd22bc28df" dependencies = [ "async-trait", - "crossbeam-channel 0.5.13", "futures-channel", "futures-executor", "futures-util", + "glob", "once_cell", - "opentelemetry_api", - "ordered-float 3.9.2", + "opentelemetry", "percent-encoding", "rand 0.8.5", - "regex", "serde_json", "thiserror", "tokio", @@ -4260,15 +4205,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "ordered-float" -version = "3.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" -dependencies = [ - "num-traits", -] - [[package]] name = "os_info" version = "3.7.0" @@ -4733,22 +4669,22 @@ dependencies = [ [[package]] name = "prost" -version = "0.11.9" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" 
+checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" dependencies = [ "bytes", - "prost-derive 0.11.9", + "prost-derive 0.12.1", ] [[package]] name = "prost" -version = "0.12.1" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" +checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc" dependencies = [ "bytes", - "prost-derive 0.12.1", + "prost-derive 0.13.1", ] [[package]] @@ -4775,25 +4711,25 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.11.9" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.11.0", "proc-macro2 1.0.69", "quote 1.0.33", - "syn 1.0.109", + "syn 2.0.38", ] [[package]] name = "prost-derive" -version = "0.12.1" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" +checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" dependencies = [ "anyhow", - "itertools 0.11.0", + "itertools 0.12.0", "proc-macro2 1.0.69", "quote 1.0.33", "syn 2.0.38", @@ -5749,7 +5685,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ - "ordered-float 2.10.1", + "ordered-float", "serde", ] @@ -6673,12 +6609,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.30" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = 
"5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", + "num-conv", "powerfmt", "serde", "time-core", @@ -6693,10 +6630,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.15" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ + "num-conv", "time-core", ] @@ -6758,38 +6696,27 @@ dependencies = [ [[package]] name = "tokio" -version = "1.34.0" +version = "1.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" +checksum = "d040ac2b29ab03b09d4129c2f5bbd012a3ac2f79d38ff506a4bf8dd34b0eac8a" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.48.0", -] - -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2 1.0.69", "quote 1.0.33", @@ -6885,24 +6812,26 @@ dependencies = [ [[package]] name = "tonic" -version = "0.9.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" 
+checksum = "38659f4a91aba8598d27821589f5db7dddd94601e7a01b1e485a50e5484c7401" dependencies = [ + "async-stream", "async-trait", - "axum 0.6.20", - "base64 0.21.5", + "axum", + "base64 0.22.1", "bytes", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.9", - "http-body 0.4.6", - "hyper 0.14.29", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.3.1", "hyper-timeout", + "hyper-util", "percent-encoding", "pin-project", - "prost 0.11.9", + "prost 0.13.1", + "socket2", "tokio", "tokio-stream", "tower", @@ -7004,20 +6933,33 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + [[package]] name = "tracing-opentelemetry" -version = "0.21.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75327c6b667828ddc28f5e3f169036cb793c3f588d83bf0f262a7f062ffed3c8" +checksum = "a9784ed4da7d921bc8df6963f8c80a0e4ce34ba6ba76668acadd3edbd985ff3b" dependencies = [ + "js-sys", "once_cell", "opentelemetry", "opentelemetry_sdk", "smallvec", "tracing", "tracing-core", - "tracing-log", + "tracing-log 0.2.0", "tracing-subscriber", + "web-time", ] [[package]] @@ -7048,7 +6990,7 @@ dependencies = [ "time", "tracing", "tracing-core", - "tracing-log", + "tracing-log 0.1.4", "tracing-serde", ] @@ -7463,6 +7405,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-roots" version = "0.26.0" @@ -7552,6 +7504,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = 
"0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -7582,6 +7543,22 @@ dependencies = [ "windows_x86_64_msvc 0.48.5", ] +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -7594,6 +7571,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -7606,6 +7589,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -7618,6 +7607,18 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = 
"windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -7630,6 +7631,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -7642,6 +7649,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -7654,6 +7667,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -7666,6 +7685,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + [[package]] name = "winnow" version = "0.5.17" @@ -8140,6 +8165,7 @@ dependencies = [ "zksync_concurrency", "zksync_consensus_utils", "zksync_crypto_primitives", + "zksync_vlog", ] [[package]] @@ -8328,7 +8354,7 @@ name = "zksync_contract_verification_server" version = "0.1.0" dependencies = [ "anyhow", - "axum 0.7.5", + "axum", "serde", "serde_json", "tokio", @@ -8809,7 +8835,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "axum 0.7.5", + "axum", "futures 0.3.28", "itertools 0.10.5", "once_cell", @@ -8887,7 +8913,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "axum 0.7.5", + "axum", "chrono", "futures 0.3.28", "governor", @@ -9204,7 +9230,7 @@ name = "zksync_proof_data_handler" version = "0.1.0" dependencies = [ "anyhow", - "axum 0.7.5", + "axum", "chrono", "hyper 1.3.1", "serde_json", @@ -9656,13 +9682,17 @@ dependencies = [ "opentelemetry", "opentelemetry-otlp", "opentelemetry-semantic-conventions", + "opentelemetry_sdk", "sentry", "serde", "serde_json", + "thiserror", + "time", "tokio", "tracing", "tracing-opentelemetry", "tracing-subscriber", + "url", "vise", "vise-exporter", ] diff --git a/Cargo.toml b/Cargo.toml index 7b6ac30be8f1..49b2cc50c567 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -139,9 +139,10 @@ num = "0.4.0" num_cpus = "1.13" num_enum = "0.7.2" once_cell = "1" -opentelemetry = "0.20.0" -opentelemetry-otlp = "0.13.0" -opentelemetry-semantic-conventions = "0.12.0" +opentelemetry = "0.24.0" +opentelemetry_sdk = "0.24.0" +opentelemetry-otlp = "0.17.0" +opentelemetry-semantic-conventions = "0.16.0" pin-project-lite = "0.2.13" pretty_assertions = "1" prost = "0.12.1" @@ -179,7 +180,8 @@ tower = "0.4.13" tower-http = "0.5.2" tracing = "0.1" 
tracing-subscriber = "0.3" -tracing-opentelemetry = "0.21.0" +tracing-opentelemetry = "0.25.0" +time = "0.3.36" # Has to be same as used by `tracing-subscriber` url = "2" web3 = "0.19.0" fraction = "0.15.3" diff --git a/core/bin/block_reverter/Cargo.toml b/core/bin/block_reverter/Cargo.toml index 9ac7a49335c4..2144fcffdddd 100644 --- a/core/bin/block_reverter/Cargo.toml +++ b/core/bin/block_reverter/Cargo.toml @@ -12,7 +12,7 @@ categories.workspace = true publish = false [dependencies] -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_core_leftovers.workspace = true zksync_env_config.workspace = true zksync_dal.workspace = true diff --git a/core/bin/block_reverter/src/main.rs b/core/bin/block_reverter/src/main.rs index 513de522aa40..1be006cfb3d1 100644 --- a/core/bin/block_reverter/src/main.rs +++ b/core/bin/block_reverter/src/main.rs @@ -106,21 +106,19 @@ async fn main() -> anyhow::Result<()> { let opts = Cli::parse(); let observability_config = ObservabilityConfig::from_env().context("ObservabilityConfig::from_env()")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - let mut builder = zksync_vlog::ObservabilityBuilder::new() - .with_log_format(log_format) - .disable_default_logs(); // It's a CLI application, so we only need to show logs that were actually requested. - if let Some(sentry_url) = observability_config.sentry_url { - builder = builder - .with_sentry_url(&sentry_url) - .context("Invalid Sentry URL")? - .with_sentry_environment(observability_config.sentry_environment); - } - let _guard = builder.build(); + let logs = zksync_vlog::Logs::try_from(observability_config.clone()) + .context("logs")? 
+ .disable_default_logs(); // It's a CLI application, so we only need to show logs that were actually requested.; + let sentry: Option = + TryFrom::try_from(observability_config.clone()).context("sentry")?; + let opentelemetry: Option = + TryFrom::try_from(observability_config.clone()).context("opentelemetry")?; + let _guard = zksync_vlog::ObservabilityBuilder::new() + .with_logs(Some(logs)) + .with_sentry(sentry) + .with_opentelemetry(opentelemetry) + .build(); let general_config: Option = if let Some(path) = opts.config_path { let yaml = std::fs::read_to_string(&path).with_context(|| path.display().to_string())?; diff --git a/core/bin/contract-verifier/Cargo.toml b/core/bin/contract-verifier/Cargo.toml index d57b44f046cc..f088c2337e71 100644 --- a/core/bin/contract-verifier/Cargo.toml +++ b/core/bin/contract-verifier/Cargo.toml @@ -13,7 +13,7 @@ publish = false [dependencies] zksync_dal.workspace = true zksync_env_config.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_contract_verifier_lib.workspace = true zksync_queued_job_processor.workspace = true zksync_utils.workspace = true diff --git a/core/bin/contract-verifier/src/main.rs b/core/bin/contract-verifier/src/main.rs index fe33a34a7583..36640049e446 100644 --- a/core/bin/contract-verifier/src/main.rs +++ b/core/bin/contract-verifier/src/main.rs @@ -146,25 +146,8 @@ async fn main() -> anyhow::Result<()> { let observability_config = general_config .observability .context("ObservabilityConfig")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .expect("Invalid Sentry URL") - .with_sentry_environment(observability_config.sentry_environment); - } - let _guard 
= builder.build(); - // Report whether sentry is running after the logging subsystem was initialized. - if let Some(sentry_url) = observability_config.sentry_url { - tracing::info!("Sentry configured with URL: {sentry_url}"); - } else { - tracing::info!("No sentry URL was provided"); - } + let _observability_guard = observability_config.install()?; let (stop_sender, stop_receiver) = watch::channel(false); let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); diff --git a/core/bin/external_node/src/config/observability.rs b/core/bin/external_node/src/config/observability.rs index 4cd4efe0df04..0dd83f3bd35b 100644 --- a/core/bin/external_node/src/config/observability.rs +++ b/core/bin/external_node/src/config/observability.rs @@ -3,7 +3,7 @@ use std::{collections::HashMap, time::Duration}; use anyhow::Context as _; use serde::Deserialize; use zksync_config::configs::GeneralConfig; -use zksync_vlog::{prometheus::PrometheusExporterConfig, LogFormat}; +use zksync_vlog::{logs::LogFormat, prometheus::PrometheusExporterConfig}; use super::{ConfigurationSource, Environment}; @@ -81,26 +81,24 @@ impl ObservabilityENConfig { } pub fn build_observability(&self) -> anyhow::Result { - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(self.log_format); - if let Some(log_directives) = self.log_directives.clone() { - builder = builder.with_log_directives(log_directives) - }; + let logs = zksync_vlog::Logs::from(self.log_format) + .with_log_directives(self.log_directives.clone()); + // Some legacy deployments use `unset` as an equivalent of `None`. let sentry_url = self.sentry_url.as_deref().filter(|&url| url != "unset"); - if let Some(sentry_url) = sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .context("Invalid Sentry URL")? - .with_sentry_environment(self.sentry_environment.clone()); - } - let guard = builder.build(); - - // Report whether sentry is running after the logging subsystem was initialized. 
- if let Some(sentry_url) = sentry_url { - tracing::info!("Sentry configured with URL: {sentry_url}"); - } else { - tracing::info!("No sentry URL was provided"); - } + let sentry = sentry_url + .map(|url| { + anyhow::Ok( + zksync_vlog::Sentry::new(url) + .context("Invalid Sentry URL")? + .with_environment(self.sentry_environment.clone()), + ) + }) + .transpose()?; + let guard = zksync_vlog::ObservabilityBuilder::new() + .with_logs(Some(logs)) + .with_sentry(sentry) + .build(); Ok(guard) } diff --git a/core/bin/external_node/src/config/tests.rs b/core/bin/external_node/src/config/tests.rs index 1b42b98a32a4..43210a765723 100644 --- a/core/bin/external_node/src/config/tests.rs +++ b/core/bin/external_node/src/config/tests.rs @@ -42,12 +42,12 @@ fn parsing_observability_config() { assert_eq!(config.prometheus_port, Some(3322)); assert_eq!(config.sentry_url.unwrap(), "https://example.com/"); assert_eq!(config.sentry_environment.unwrap(), "mainnet - mainnet2"); - assert_matches!(config.log_format, zksync_vlog::LogFormat::Plain); + assert_matches!(config.log_format, zksync_vlog::logs::LogFormat::Plain); assert_eq!(config.prometheus_push_interval_ms, 10_000); env_vars.0.insert("MISC_LOG_FORMAT", "json"); let config = ObservabilityENConfig::new(&env_vars).unwrap(); - assert_matches!(config.log_format, zksync_vlog::LogFormat::Json); + assert_matches!(config.log_format, zksync_vlog::logs::LogFormat::Json); // If both the canonical and obsolete vars are specified, the canonical one should prevail. 
env_vars.0.insert("EN_LOG_FORMAT", "plain"); @@ -55,7 +55,7 @@ fn parsing_observability_config() { .0 .insert("EN_SENTRY_URL", "https://example.com/new"); let config = ObservabilityENConfig::new(&env_vars).unwrap(); - assert_matches!(config.log_format, zksync_vlog::LogFormat::Plain); + assert_matches!(config.log_format, zksync_vlog::logs::LogFormat::Plain); assert_eq!(config.sentry_url.unwrap(), "https://example.com/new"); } diff --git a/core/bin/external_node/src/main.rs b/core/bin/external_node/src/main.rs index f6696d733482..00ce3cc71e87 100644 --- a/core/bin/external_node/src/main.rs +++ b/core/bin/external_node/src/main.rs @@ -824,6 +824,8 @@ async fn main() -> anyhow::Result<()> { if !opt.enable_consensus { config.consensus = None; } + // Note: when old code will be removed, observability must be build within + // tokio context. let _guard = config.observability.build_observability()?; // Build L1 and L2 clients. @@ -856,7 +858,7 @@ async fn main() -> anyhow::Result<()> { // We run the node from a different thread, since the current thread is in tokio context. 
std::thread::spawn(move || { let node = - ExternalNodeBuilder::new(config).build(opt.components.0.into_iter().collect())?; + ExternalNodeBuilder::new(config)?.build(opt.components.0.into_iter().collect())?; node.run()?; anyhow::Ok(()) }) diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index ff851999f623..d9e216c84dd2 100644 --- a/core/bin/external_node/src/node_builder.rs +++ b/core/bin/external_node/src/node_builder.rs @@ -69,11 +69,11 @@ pub(crate) struct ExternalNodeBuilder { } impl ExternalNodeBuilder { - pub fn new(config: ExternalNodeConfig) -> Self { - Self { - node: ZkStackServiceBuilder::new(), + pub fn new(config: ExternalNodeConfig) -> anyhow::Result { + Ok(Self { + node: ZkStackServiceBuilder::new().context("Cannot create ZkStackServiceBuilder")?, config, - } + }) } fn add_sigint_handler_layer(mut self) -> anyhow::Result { @@ -587,7 +587,7 @@ impl ExternalNodeBuilder { } } - Ok(self.node.build()?) + Ok(self.node.build()) } } diff --git a/core/bin/external_node/src/tests/mod.rs b/core/bin/external_node/src/tests/mod.rs index e2b7edc174c4..2adf336728f0 100644 --- a/core/bin/external_node/src/tests/mod.rs +++ b/core/bin/external_node/src/tests/mod.rs @@ -28,7 +28,7 @@ async fn external_node_basics(components_str: &'static str) { let node_handle = tokio::task::spawn_blocking(move || { std::thread::spawn(move || { - let mut node = ExternalNodeBuilder::new(env.config); + let mut node = ExternalNodeBuilder::new(env.config)?; inject_test_layers( &mut node, env.sigint_receiver, @@ -97,7 +97,7 @@ async fn node_reacts_to_stop_signal_during_initial_reorg_detection() { let mut node_handle = tokio::task::spawn_blocking(move || { std::thread::spawn(move || { - let mut node = ExternalNodeBuilder::new(env.config); + let mut node = ExternalNodeBuilder::new(env.config)?; inject_test_layers( &mut node, env.sigint_receiver, @@ -133,7 +133,7 @@ async fn running_tree_without_core_is_not_allowed() { let node_handle = 
tokio::task::spawn_blocking(move || { std::thread::spawn(move || { - let mut node = ExternalNodeBuilder::new(env.config); + let mut node = ExternalNodeBuilder::new(env.config)?; inject_test_layers( &mut node, env.sigint_receiver, @@ -170,7 +170,7 @@ async fn running_tree_api_without_tree_is_not_allowed() { let node_handle = tokio::task::spawn_blocking(move || { std::thread::spawn(move || { - let mut node = ExternalNodeBuilder::new(env.config); + let mut node = ExternalNodeBuilder::new(env.config)?; inject_test_layers( &mut node, env.sigint_receiver, diff --git a/core/bin/merkle_tree_consistency_checker/Cargo.toml b/core/bin/merkle_tree_consistency_checker/Cargo.toml index 1399faec1d42..eb7dcd81a0dc 100644 --- a/core/bin/merkle_tree_consistency_checker/Cargo.toml +++ b/core/bin/merkle_tree_consistency_checker/Cargo.toml @@ -12,7 +12,7 @@ categories.workspace = true publish = false [dependencies] -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_merkle_tree.workspace = true zksync_types.workspace = true diff --git a/core/bin/merkle_tree_consistency_checker/src/main.rs b/core/bin/merkle_tree_consistency_checker/src/main.rs index f8584653681f..4218975e4865 100644 --- a/core/bin/merkle_tree_consistency_checker/src/main.rs +++ b/core/bin/merkle_tree_consistency_checker/src/main.rs @@ -54,18 +54,7 @@ impl Cli { fn main() -> anyhow::Result<()> { let observability_config = ObservabilityConfig::from_env().context("ObservabilityConfig::from_env()")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = observability_config.sentry_url { - builder = builder - .with_sentry_url(&sentry_url) - .context("Invalid Sentry URL")? 
- .with_sentry_environment(observability_config.sentry_environment); - } - let _guard = builder.build(); + let _observability_guard = observability_config.install()?; let db_config = DBConfig::from_env().context("DBConfig::from_env()")?; Cli::parse().run(&db_config) diff --git a/core/bin/snapshots_creator/Cargo.toml b/core/bin/snapshots_creator/Cargo.toml index 33b1fa82a857..530b9635cd4f 100644 --- a/core/bin/snapshots_creator/Cargo.toml +++ b/core/bin/snapshots_creator/Cargo.toml @@ -13,7 +13,7 @@ publish = false [dependencies] vise.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_dal.workspace = true zksync_env_config.workspace = true zksync_types.workspace = true diff --git a/core/bin/snapshots_creator/src/main.rs b/core/bin/snapshots_creator/src/main.rs index e07a879746ad..bfdc17fefcd5 100644 --- a/core/bin/snapshots_creator/src/main.rs +++ b/core/bin/snapshots_creator/src/main.rs @@ -70,21 +70,9 @@ async fn main() -> anyhow::Result<()> { let observability_config = general_config .observability .context("observability config")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - + let _observability_guard = observability_config.install()?; let prometheus_exporter_task = maybe_enable_prometheus_metrics(general_config.prometheus_config, stop_receiver).await?; - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = observability_config.sentry_url { - builder = builder - .with_sentry_url(&sentry_url) - .context("Invalid Sentry URL")? 
- .with_sentry_environment(observability_config.sentry_environment); - } - let _guard = builder.build(); tracing::info!("Starting snapshots creator"); let creator_config = general_config diff --git a/core/bin/zksync_server/Cargo.toml b/core/bin/zksync_server/Cargo.toml index 5470f24010c1..72eff1384e2d 100644 --- a/core/bin/zksync_server/Cargo.toml +++ b/core/bin/zksync_server/Cargo.toml @@ -12,7 +12,7 @@ categories.workspace = true publish = false [dependencies] -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_eth_client.workspace = true zksync_protobuf_config.workspace = true diff --git a/core/bin/zksync_server/src/main.rs b/core/bin/zksync_server/src/main.rs index a59705b8e587..93cabfdfe6d6 100644 --- a/core/bin/zksync_server/src/main.rs +++ b/core/bin/zksync_server/src/main.rs @@ -113,36 +113,6 @@ fn main() -> anyhow::Result<()> { } }; - let observability_config = configs - .observability - .clone() - .context("observability config")?; - - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(log_directives) = observability_config.log_directives { - builder = builder.with_log_directives(log_directives); - } - - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .expect("Invalid Sentry URL") - .with_sentry_environment(observability_config.sentry_environment); - } - let _guard = builder.build(); - - // Report whether sentry is running after the logging subsystem was initialized. 
- if let Some(sentry_url) = observability_config.sentry_url { - tracing::info!("Sentry configured with URL: {sentry_url}"); - } else { - tracing::info!("No sentry URL was provided"); - } - let wallets = match opt.wallets_path { None => tmp_config.wallets(), Some(path) => { @@ -186,8 +156,18 @@ fn main() -> anyhow::Result<()> { .context("failed decoding genesis YAML config")? } }; + let observability_config = configs + .observability + .clone() + .context("observability config")?; - let node = MainNodeBuilder::new(configs, wallets, genesis, contracts_config, secrets); + let node = MainNodeBuilder::new(configs, wallets, genesis, contracts_config, secrets)?; + + let _observability_guard = { + // Observability initialization should be performed within tokio context. + let _context_guard = node.runtime_handle().enter(); + observability_config.install()? + }; if opt.genesis { // If genesis is requested, we don't need to run the node. diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index 0eaa9b651f64..3da14c920886 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -92,15 +92,19 @@ impl MainNodeBuilder { genesis_config: GenesisConfig, contracts_config: ContractsConfig, secrets: Secrets, - ) -> Self { - Self { - node: ZkStackServiceBuilder::new(), + ) -> anyhow::Result { + Ok(Self { + node: ZkStackServiceBuilder::new().context("Cannot create ZkStackServiceBuilder")?, configs, wallets, genesis_config, contracts_config, secrets, - } + }) + } + + pub fn runtime_handle(&self) -> tokio::runtime::Handle { + self.node.runtime_handle() } fn add_sigint_handler_layer(mut self) -> anyhow::Result { @@ -589,7 +593,7 @@ impl MainNodeBuilder { .add_query_eth_client_layer()? .add_storage_initialization_layer(LayerKind::Task)?; - Ok(self.node.build()?) + Ok(self.node.build()) } /// Builds the node with the specified components. 
@@ -701,7 +705,7 @@ impl MainNodeBuilder { } } } - Ok(self.node.build()?) + Ok(self.node.build()) } } diff --git a/core/bin/zksync_tee_prover/Cargo.toml b/core/bin/zksync_tee_prover/Cargo.toml index 037833b1890e..0c89971fd305 100644 --- a/core/bin/zksync_tee_prover/Cargo.toml +++ b/core/bin/zksync_tee_prover/Cargo.toml @@ -23,7 +23,7 @@ tracing.workspace = true url.workspace = true vise.workspace = true zksync_basic_types.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_node_framework.workspace = true zksync_prover_interface.workspace = true diff --git a/core/bin/zksync_tee_prover/src/main.rs b/core/bin/zksync_tee_prover/src/main.rs index b6c311cb55de..174e2108242d 100644 --- a/core/bin/zksync_tee_prover/src/main.rs +++ b/core/bin/zksync_tee_prover/src/main.rs @@ -30,26 +30,15 @@ mod tee_prover; fn main() -> anyhow::Result<()> { let observability_config = ObservabilityConfig::from_env().context("ObservabilityConfig::from_env()")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = observability_config.sentry_url { - builder = builder - .with_sentry_url(&sentry_url) - .context("Invalid Sentry URL")? 
- .with_sentry_environment(observability_config.sentry_environment); - } - let _guard = builder.build(); + let _observability_guard = observability_config.install()?; let tee_prover_config = TeeProverConfig::from_env()?; let attestation_quote_bytes = std::fs::read(tee_prover_config.attestation_quote_file_path)?; let prometheus_config = PrometheusConfig::from_env()?; - let mut builder = ZkStackServiceBuilder::new(); - let mut builder_mut = builder + let mut builder = ZkStackServiceBuilder::new()?; + builder .add_layer(SigintHandlerLayer) .add_layer(TeeProverLayer::new( tee_prover_config.api_url, @@ -61,9 +50,9 @@ fn main() -> anyhow::Result<()> { if let Some(gateway) = prometheus_config.gateway_endpoint() { let exporter_config = PrometheusExporterConfig::push(gateway, prometheus_config.push_interval()); - builder_mut = builder_mut.add_layer(PrometheusExporterLayer(exporter_config)); + builder.add_layer(PrometheusExporterLayer(exporter_config)); } - builder_mut.build()?.run()?; + builder.build().run()?; Ok(()) } diff --git a/core/lib/config/Cargo.toml b/core/lib/config/Cargo.toml index b1a2a0ef1e8f..784bdebfef07 100644 --- a/core/lib/config/Cargo.toml +++ b/core/lib/config/Cargo.toml @@ -15,9 +15,14 @@ zksync_basic_types.workspace = true zksync_crypto_primitives.workspace = true zksync_consensus_utils.workspace = true zksync_concurrency.workspace = true +zksync_vlog = { workspace = true, optional = true } url.workspace = true anyhow.workspace = true rand.workspace = true secrecy.workspace = true serde = { workspace = true, features = ["derive"] } + +[features] +default = [] +observability_ext = ["zksync_vlog"] diff --git a/core/lib/config/src/configs/observability.rs b/core/lib/config/src/configs/observability.rs index 96754e61e897..0bc61df31197 100644 --- a/core/lib/config/src/configs/observability.rs +++ b/core/lib/config/src/configs/observability.rs @@ -11,7 +11,7 @@ pub struct ObservabilityConfig { /// Format of the logs as expected by the `vlog` crate. 
/// Currently must be either `plain` or `json`. pub log_format: String, - // Log directives in format that is used in `RUST_LOG` + /// Log directives in format that is used in `RUST_LOG` pub log_directives: Option, } diff --git a/core/lib/config/src/lib.rs b/core/lib/config/src/lib.rs index 91b5c6d480e3..c5944e581a97 100644 --- a/core/lib/config/src/lib.rs +++ b/core/lib/config/src/lib.rs @@ -8,3 +8,6 @@ pub use crate::configs::{ pub mod configs; pub mod testonly; + +#[cfg(feature = "observability_ext")] +mod observability_ext; diff --git a/core/lib/config/src/observability_ext.rs b/core/lib/config/src/observability_ext.rs new file mode 100644 index 000000000000..5f8a8927efd5 --- /dev/null +++ b/core/lib/config/src/observability_ext.rs @@ -0,0 +1,52 @@ +//! Extensions for the `ObservabilityConfig` to install the observability stack. + +use crate::configs::ObservabilityConfig; + +impl ObservabilityConfig { + /// Installs the observability stack based on the configuration. + /// + /// If any overrides are needed, consider using the `TryFrom` implementations. + pub fn install(self) -> anyhow::Result { + let logs = zksync_vlog::Logs::try_from(self.clone())?; + let sentry = Option::::try_from(self.clone())?; + let opentelemetry = Option::::try_from(self.clone())?; + + let guard = zksync_vlog::ObservabilityBuilder::new() + .with_logs(Some(logs)) + .with_sentry(sentry) + .with_opentelemetry(opentelemetry) + .build(); + Ok(guard) + } +} + +impl TryFrom for zksync_vlog::Logs { + type Error = anyhow::Error; + + fn try_from(config: ObservabilityConfig) -> Result { + Ok(zksync_vlog::Logs::new(&config.log_format)?.with_log_directives(config.log_directives)) + } +} + +impl TryFrom for Option { + type Error = anyhow::Error; + + fn try_from(config: ObservabilityConfig) -> Result { + Ok(config + .sentry_url + .map(|url| zksync_vlog::Sentry::new(&url)) + .transpose()? 
+ .map(|sentry| sentry.with_environment(config.sentry_environment))) + } +} + +impl TryFrom for Option { + type Error = anyhow::Error; + + fn try_from(config: ObservabilityConfig) -> Result { + Ok(config + .opentelemetry + .map(|config| zksync_vlog::OpenTelemetry::new(&config.level, config.endpoint)) + .transpose()?) + } +} diff --git a/core/lib/utils/src/wait_for_tasks.rs b/core/lib/utils/src/wait_for_tasks.rs index ab548bdd1dde..210d26484162 100644 --- a/core/lib/utils/src/wait_for_tasks.rs +++ b/core/lib/utils/src/wait_for_tasks.rs @@ -47,14 +47,20 @@ impl ManagedTasks { let err = "One of the actors finished its run, while it wasn't expected to do it"; tracing::error!("{err}"); - zksync_vlog::capture_message(err, zksync_vlog::AlertLevel::Warning); + zksync_vlog::sentry::capture_message( + err, + zksync_vlog::sentry::AlertLevel::Warning, + ); } } Ok(Err(err)) => { let err = format!("One of the tokio actors unexpectedly finished with error: {err:#}"); tracing::error!("{err}"); - zksync_vlog::capture_message(&err, zksync_vlog::AlertLevel::Warning); + zksync_vlog::sentry::capture_message( + &err, + zksync_vlog::sentry::AlertLevel::Warning, + ); } Err(error) => { let panic_message = try_extract_panic_message(error); diff --git a/core/lib/vlog/Cargo.toml b/core/lib/vlog/Cargo.toml index eb1ed735519c..3f9ce247442a 100644 --- a/core/lib/vlog/Cargo.toml +++ b/core/lib/vlog/Cargo.toml @@ -22,10 +22,13 @@ tracing-subscriber = { workspace = true, features = [ "json", ] } tracing-opentelemetry.workspace = true +time.workspace = true +thiserror.workspace = true sentry.workspace = true serde.workspace = true serde_json.workspace = true -opentelemetry = { workspace = true, features = ["rt-tokio", "trace"] } +opentelemetry = { workspace = true, features = ["trace"] } +opentelemetry_sdk = { workspace = true, features = [ "rt-tokio" ] } opentelemetry-otlp = { workspace = true, features = [ "http-proto", "reqwest-client", @@ -33,3 +36,4 @@ opentelemetry-otlp = { workspace = true, 
features = [ opentelemetry-semantic-conventions.workspace = true vise.workspace = true vise-exporter.workspace = true +url.workspace = true diff --git a/core/lib/vlog/src/lib.rs b/core/lib/vlog/src/lib.rs index 9b2886ba81d5..5633f20f5882 100644 --- a/core/lib/vlog/src/lib.rs +++ b/core/lib/vlog/src/lib.rs @@ -1,152 +1,23 @@ //! This crate contains the observability subsystem. //! It is responsible for providing a centralized interface for consistent observability configuration. -use std::{backtrace::Backtrace, borrow::Cow, panic::PanicInfo, str::FromStr}; +use ::sentry::ClientInitGuard; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; -// Temporary re-export of `sentry::capture_message` aiming to simplify the transition from `vlog` to using -// crates directly. -use opentelemetry::{ - sdk::{ - propagation::TraceContextPropagator, - trace::{self, RandomIdGenerator, Sampler, Tracer}, - Resource, - }, - KeyValue, -}; -use opentelemetry_otlp::WithExportConfig; -use opentelemetry_semantic_conventions::resource::SERVICE_NAME; -pub use sentry::{capture_message, Level as AlertLevel}; -use sentry::{types::Dsn, ClientInitGuard}; -use serde::{de::Error, Deserialize, Deserializer}; -use tracing_opentelemetry::OpenTelemetryLayer; -use tracing_subscriber::{ - filter::Filtered, - fmt, - layer::{Layered, SubscriberExt}, - registry::LookupSpan, - util::SubscriberInitExt, - EnvFilter, Layer, -}; +pub use crate::{logs::Logs, opentelemetry::OpenTelemetry, sentry::Sentry}; +pub mod logs; +pub mod opentelemetry; pub mod prometheus; - -type TracingLayer = - Layered, EnvFilter, Inner>, Inner>; - -/// Specifies the format of the logs in stdout. 
-#[derive(Debug, Clone, Copy, Default)] -pub enum LogFormat { - #[default] - Plain, - Json, -} - -impl std::fmt::Display for LogFormat { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Plain => f.write_str("plain"), - Self::Json => f.write_str("json"), - } - } -} - -#[derive(Debug)] -pub struct LogFormatError(&'static str); - -impl std::fmt::Display for LogFormatError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -impl std::error::Error for LogFormatError {} - -impl FromStr for LogFormat { - type Err = LogFormatError; - - fn from_str(s: &str) -> Result { - match s { - "plain" => Ok(LogFormat::Plain), - "json" => Ok(LogFormat::Json), - _ => Err(LogFormatError("invalid log format")), - } - } -} - -impl<'de> Deserialize<'de> for LogFormat { - fn deserialize>(deserializer: D) -> Result { - let s = String::deserialize(deserializer)?; - s.parse::().map_err(D::Error::custom) - } -} - -// Doesn't define WARN and ERROR, because the highest verbosity of spans is INFO. 
-#[derive(Copy, Clone, Debug, Default)] -pub enum OpenTelemetryLevel { - #[default] - OFF, - INFO, - DEBUG, - TRACE, -} - -#[derive(Debug)] -pub struct OpenTelemetryLevelFormatError; - -impl std::fmt::Display for OpenTelemetryLevelFormatError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "Invalid OpenTelemetry level format") - } -} - -impl std::error::Error for OpenTelemetryLevelFormatError {} - -impl FromStr for OpenTelemetryLevel { - type Err = OpenTelemetryLevelFormatError; - - fn from_str(s: &str) -> Result { - match s { - "off" => Ok(OpenTelemetryLevel::OFF), - "info" => Ok(OpenTelemetryLevel::INFO), - "debug" => Ok(OpenTelemetryLevel::DEBUG), - "trace" => Ok(OpenTelemetryLevel::TRACE), - _ => Err(OpenTelemetryLevelFormatError), - } - } -} - -impl std::fmt::Display for OpenTelemetryLevel { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let str = match self { - OpenTelemetryLevel::OFF => "off", - OpenTelemetryLevel::INFO => "info", - OpenTelemetryLevel::DEBUG => "debug", - OpenTelemetryLevel::TRACE => "trace", - }; - write!(f, "{}", str) - } -} - -#[derive(Clone, Debug)] -pub struct OpenTelemetryOptions { - /// Enables export of span data of specified level (and above) using opentelemetry exporters. - pub opentelemetry_level: OpenTelemetryLevel, - /// Opentelemetry HTTP collector endpoint. - pub otlp_endpoint: String, - /// Logical service name to be used for exported events. See [`SERVICE_NAME`]. - pub service_name: String, -} +pub mod sentry; /// Builder for the observability subsystem. /// Currently capable of configuring logging output and sentry integration. #[derive(Debug, Default)] pub struct ObservabilityBuilder { - disable_default_logs: bool, - log_format: LogFormat, - log_directives: Option, - sentry_url: Option, - sentry_environment: Option, - opentelemetry_options: Option, + logs: Option, + opentelemetry_layer: Option, + sentry: Option, } /// Guard for the observability subsystem. 
@@ -167,235 +38,40 @@ impl ObservabilityBuilder { Self::default() } - /// Sets the log format. - /// Default is `LogFormat::Plain`. - pub fn with_log_format(mut self, log_format: LogFormat) -> Self { - self.log_format = log_format; + pub fn with_logs(mut self, logs: Option) -> Self { + self.logs = logs; self } - pub fn with_log_directives(mut self, log_level: String) -> Self { - self.log_directives = Some(log_level); + pub fn with_opentelemetry(mut self, opentelemetry: Option) -> Self { + self.opentelemetry_layer = opentelemetry; self } - /// Disables logs enabled by default. - /// May be used, for example, in interactive CLI applications, where the user may want to fully control - /// the verbosity. - pub fn disable_default_logs(mut self) -> Self { - self.disable_default_logs = true; - self - } - - /// Enables Sentry integration. - /// Returns an error if the provided Sentry URL is invalid. - pub fn with_sentry_url( - mut self, - sentry_url: &str, - ) -> Result { - let sentry_url = sentry_url.parse()?; - self.sentry_url = Some(sentry_url); - Ok(self) - } - - /// Sets the Sentry environment ID. - /// If not set, no environment will be provided in Sentry events. 
- pub fn with_sentry_environment(mut self, environment: Option) -> Self { - self.sentry_environment = environment; + pub fn with_sentry(mut self, sentry: Option) -> Self { + self.sentry = sentry; self } - pub fn with_opentelemetry( - mut self, - opentelemetry_level: &str, - otlp_endpoint: String, - service_name: String, - ) -> Result { - self.opentelemetry_options = Some(OpenTelemetryOptions { - opentelemetry_level: opentelemetry_level.parse()?, - otlp_endpoint, - service_name, - }); - Ok(self) - } - - fn add_opentelemetry_layer( - opentelemetry_level: OpenTelemetryLevel, - otlp_endpoint: String, - service_name: String, - subscriber: S, - ) -> TracingLayer - where - S: tracing::Subscriber + for<'span> LookupSpan<'span> + Send + Sync, - { - let filter = match opentelemetry_level { - OpenTelemetryLevel::OFF => EnvFilter::new("off"), - OpenTelemetryLevel::INFO => EnvFilter::new("info"), - OpenTelemetryLevel::DEBUG => EnvFilter::new("debug"), - OpenTelemetryLevel::TRACE => EnvFilter::new("trace"), - }; - // `otel::tracing` should be a level info to emit opentelemetry trace & span - // `otel` set to debug to log detected resources, configuration read and inferred - let filter = filter - .add_directive("otel::tracing=trace".parse().unwrap()) - .add_directive("otel=debug".parse().unwrap()); - - let resource = vec![KeyValue::new(SERVICE_NAME, service_name)]; - - let tracer = opentelemetry_otlp::new_pipeline() - .tracing() - .with_exporter( - opentelemetry_otlp::new_exporter() - .http() - .with_endpoint(otlp_endpoint), - ) - .with_trace_config( - trace::config() - .with_sampler(Sampler::AlwaysOn) - .with_id_generator(RandomIdGenerator::default()) - .with_resource(Resource::new(resource)), - ) - .install_batch(opentelemetry::runtime::Tokio) - .unwrap(); - - opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new()); - let layer = tracing_opentelemetry::layer() - .with_tracer(tracer) - .with_filter(filter); - subscriber.with(layer) - } - - /// Builds a 
filter for the logs. - /// - /// Unless `disable_default_logs` was set, uses `zksync=info` as a default which is then merged - /// with user-defined directives. Provided directives can extend/override the default value. - /// - /// The provided default convers all the crates with a name starting with `zksync` (per `tracing` - /// [documentation][1]), which is a good enough default for any project. - /// - /// If `log_directives` are provided via `with_log_directives`, they will be used. - /// Otherwise, the value will be parsed from the environment variable `RUST_LOG`. - /// - /// [1]: https://docs.rs/tracing-subscriber/0.3.18/tracing_subscriber/filter/targets/struct.Targets.html#filtering-with-targets - fn build_filter(&self) -> EnvFilter { - let mut directives = if self.disable_default_logs { - "".to_string() - } else { - "zksync=info,".to_string() - }; - if let Some(log_directives) = &self.log_directives { - directives.push_str(log_directives); - } else if let Ok(env_directives) = std::env::var(EnvFilter::DEFAULT_ENV) { - directives.push_str(&env_directives); - }; - EnvFilter::new(directives) - } - /// Initializes the observability subsystem. pub fn build(self) -> ObservabilityGuard { - // Initialize logs. 
- let env_filter = self.build_filter(); - - match self.log_format { - LogFormat::Plain => { - let subscriber = tracing_subscriber::registry() - .with(env_filter) - .with(fmt::Layer::default()); - if let Some(opts) = self.opentelemetry_options { - let subscriber = Self::add_opentelemetry_layer( - opts.opentelemetry_level, - opts.otlp_endpoint, - opts.service_name, - subscriber, - ); - subscriber.init() - } else { - subscriber.init() - } - } - LogFormat::Json => { - let timer = tracing_subscriber::fmt::time::UtcTime::rfc_3339(); - let subscriber = tracing_subscriber::registry().with(env_filter).with( - fmt::Layer::default() - .with_file(true) - .with_line_number(true) - .with_timer(timer) - .json(), - ); - if let Some(opts) = self.opentelemetry_options { - let subscriber = Self::add_opentelemetry_layer( - opts.opentelemetry_level, - opts.otlp_endpoint, - opts.service_name, - subscriber, - ); - subscriber.init() - } else { - subscriber.init() - } - } - }; + let logs = self.logs.unwrap_or_default(); + logs.install_panic_hook(); - // Check whether we need to change the default panic handler. - // Note that this must happen before we initialize Sentry, since otherwise - // Sentry's panic handler will also invoke the default one, resulting in unformatted - // panic info being output to stderr. - if matches!(self.log_format, LogFormat::Json) { - // Remove any existing hook. We expect that no hook is set by default. - let _ = std::panic::take_hook(); - // Override the default panic handler to print the panic in JSON format. - std::panic::set_hook(Box::new(json_panic_handler)); - }; + // For now we use logs filter as a global filter for subscriber. + // Later we may want to enforce each layer to have its own filter. + let global_filter = logs.build_filter(); - // Initialize the Sentry. 
- let sentry_guard = if let Some(sentry_url) = self.sentry_url { - let options = sentry::ClientOptions { - release: sentry::release_name!(), - environment: self.sentry_environment.map(Cow::from), - attach_stacktrace: true, - ..Default::default() - }; + tracing_subscriber::registry() + .with(global_filter) + .with(logs.into_layer()) + .with(self.opentelemetry_layer.map(|layer| layer.into_layer())) + .init(); - Some(sentry::init((sentry_url, options))) - } else { - None - }; + let sentry_guard = self.sentry.map(|sentry| sentry.install()); ObservabilityGuard { _sentry_guard: sentry_guard, } } } - -fn json_panic_handler(panic_info: &PanicInfo) { - let backtrace = Backtrace::force_capture(); - let timestamp = chrono::Utc::now(); - let panic_message = if let Some(s) = panic_info.payload().downcast_ref::() { - s.as_str() - } else if let Some(s) = panic_info.payload().downcast_ref::<&str>() { - s - } else { - "Panic occurred without additional info" - }; - - let panic_location = panic_info - .location() - .map(|val| val.to_string()) - .unwrap_or_else(|| "Unknown location".to_owned()); - - let backtrace_str = backtrace.to_string(); - let timestamp_str = timestamp.format("%Y-%m-%dT%H:%M:%S%.fZ").to_string(); - - println!( - "{}", - serde_json::json!({ - "timestamp": timestamp_str, - "level": "CRITICAL", - "fields": { - "message": panic_message, - "location": panic_location, - "backtrace": backtrace_str, - } - }) - ); -} diff --git a/core/lib/vlog/src/logs/layer.rs b/core/lib/vlog/src/logs/layer.rs new file mode 100644 index 000000000000..3577487bea2e --- /dev/null +++ b/core/lib/vlog/src/logs/layer.rs @@ -0,0 +1,140 @@ +use tracing::{span, Subscriber}; +use tracing_subscriber::{fmt, registry::LookupSpan, Layer}; + +/// Implementation of statically typed logs layer, which can be either plain or JSON. +/// This is mostly required to avoid [boxing the layer][layer_box]. 
+/// +/// [layer_box]: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/layer/trait.Layer.html#method.boxed +#[derive(Debug)] +pub enum LogsLayer { + Plain(fmt::Layer), + Json(JsonLayer), +} + +macro_rules! dispatch_layer { + ($self:ident.$method:ident($($arg:ident),*)) => { + match $self { + LogsLayer::Plain(layer) => layer.$method($($arg),*), + LogsLayer::Json(layer) => layer.$method($($arg),*), + } + }; +} + +// Implementation note: methods like `and_then`, `with_filter`, `with_timer`, etc. are not +// implemented because they wrap `Self`, so default implementation is sufficient. +impl tracing_subscriber::Layer for LogsLayer +where + S: Subscriber + for<'span> LookupSpan<'span> + Send + Sync, +{ + fn on_register_dispatch(&self, collector: &tracing::Dispatch) { + dispatch_layer!(self.on_register_dispatch(collector)); + } + + fn on_layer(&mut self, subscriber: &mut S) { + dispatch_layer!(self.on_layer(subscriber)); + } + + fn register_callsite( + &self, + metadata: &'static tracing::Metadata<'static>, + ) -> tracing::subscriber::Interest { + dispatch_layer!(self.register_callsite(metadata)) + } + + fn enabled( + &self, + metadata: &tracing::Metadata<'_>, + ctx: tracing_subscriber::layer::Context<'_, S>, + ) -> bool { + dispatch_layer!(self.enabled(metadata, ctx)) + } + + fn on_new_span( + &self, + attrs: &span::Attributes<'_>, + id: &span::Id, + ctx: tracing_subscriber::layer::Context<'_, S>, + ) { + dispatch_layer!(self.on_new_span(attrs, id, ctx)) + } + + fn on_record( + &self, + span: &span::Id, + values: &span::Record<'_>, + ctx: tracing_subscriber::layer::Context<'_, S>, + ) { + dispatch_layer!(self.on_record(span, values, ctx)) + } + + fn on_follows_from( + &self, + span: &span::Id, + follows: &span::Id, + ctx: tracing_subscriber::layer::Context<'_, S>, + ) { + dispatch_layer!(self.on_follows_from(span, follows, ctx)) + } + + fn event_enabled( + &self, + event: &tracing::Event<'_>, + ctx: tracing_subscriber::layer::Context<'_, S>, + ) -> bool { + 
dispatch_layer!(self.event_enabled(event, ctx)) + } + + fn on_event(&self, event: &tracing::Event<'_>, ctx: tracing_subscriber::layer::Context<'_, S>) { + dispatch_layer!(self.on_event(event, ctx)) + } + + fn on_enter(&self, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + dispatch_layer!(self.on_enter(id, ctx)) + } + + fn on_exit(&self, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + dispatch_layer!(self.on_exit(id, ctx)) + } + + fn on_close(&self, id: span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + dispatch_layer!(self.on_close(id, ctx)) + } + + fn on_id_change( + &self, + old: &span::Id, + new: &span::Id, + ctx: tracing_subscriber::layer::Context<'_, S>, + ) { + dispatch_layer!(self.on_id_change(old, new, ctx)) + } + + fn boxed(self) -> Box + Send + Sync + 'static> + where + Self: Sized, + Self: Layer + Send + Sync + 'static, + S: Subscriber, + { + dispatch_layer!(self.boxed()) + } +} + +// I guess tracing types weren't supposed to be written, but we have to. +// If this type has to be changed, the easiest way to figure it out is to attempt +// constructing the object, e.g.: +// ``` +// let layer = fmt::Layer::default() +// .with_file(true) +// .with_line_number(true) +// .with_timer(timer) +// .json(); +// ``` +// Compiler will complain and tell the type for you. 
+type JsonLayer = tracing_subscriber::fmt::Layer< + S, + tracing_subscriber::fmt::format::JsonFields, + tracing_subscriber::fmt::format::Format< + tracing_subscriber::fmt::format::Json, + tracing_subscriber::fmt::time::UtcTime, + >, +>; diff --git a/core/lib/vlog/src/logs/mod.rs b/core/lib/vlog/src/logs/mod.rs new file mode 100644 index 000000000000..0ecf1c6d9f0b --- /dev/null +++ b/core/lib/vlog/src/logs/mod.rs @@ -0,0 +1,163 @@ +use std::{backtrace::Backtrace, panic::PanicInfo, str::FromStr}; + +use serde::Deserialize; +use tracing_subscriber::{fmt, registry::LookupSpan, EnvFilter, Layer}; + +mod layer; + +/// Specifies the format of the logs in stdout. +#[derive(Debug, Clone, Copy, Default, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum LogFormat { + #[default] + Plain, + Json, +} + +impl FromStr for LogFormat { + type Err = LogFormatError; + + fn from_str(s: &str) -> Result { + match s { + "plain" => Ok(Self::Plain), + "json" => Ok(Self::Json), + _ => Err(LogFormatError::InvalidFormat), + } + } +} + +#[derive(Debug, thiserror::Error)] +#[non_exhaustive] +pub enum LogFormatError { + #[error("Invalid log format")] + InvalidFormat, +} + +#[derive(Debug, Default)] +pub struct Logs { + format: LogFormat, + log_directives: Option, + disable_default_logs: bool, +} + +impl From for Logs { + fn from(format: LogFormat) -> Self { + Self { + format, + log_directives: None, + disable_default_logs: false, + } + } +} + +impl Logs { + pub fn new(format: &str) -> Result { + Ok(Self { + format: format.parse()?, + log_directives: None, + disable_default_logs: false, + }) + } + + /// Builds a filter for the logs. + /// + /// Unless `disable_default_logs` was set, uses `zksync=info` as a default which is then merged + /// with user-defined directives. Provided directives can extend/override the default value. 
+ /// + /// The provided default covers all the crates with a name starting with `zksync` (per `tracing` + /// [documentation][1]), which is a good enough default for any project. + /// + /// If `log_directives` are provided via `with_log_directives`, they will be used. + /// Otherwise, the value will be parsed from the environment variable `RUST_LOG`. + /// + /// [1]: https://docs.rs/tracing-subscriber/0.3.18/tracing_subscriber/filter/targets/struct.Targets.html#filtering-with-targets + pub(super) fn build_filter(&self) -> EnvFilter { + let mut directives = if self.disable_default_logs { + "".to_string() + } else { + "zksync=info,".to_string() + }; + if let Some(log_directives) = &self.log_directives { + directives.push_str(log_directives); + } else if let Ok(env_directives) = std::env::var(EnvFilter::DEFAULT_ENV) { + directives.push_str(&env_directives); + }; + EnvFilter::new(directives) + } + + pub fn with_log_directives(mut self, log_directives: Option) -> Self { + self.log_directives = log_directives; + self + } + + pub fn disable_default_logs(mut self) -> Self { + self.disable_default_logs = true; + self + } + + pub fn install_panic_hook(&self) { + // Check whether we need to change the default panic handler. + // Note that this must happen before we initialize Sentry, since otherwise + // Sentry's panic handler will also invoke the default one, resulting in unformatted + // panic info being output to stderr. + if matches!(self.format, LogFormat::Json) { + // Remove any existing hook. We expect that no hook is set by default. + let _ = std::panic::take_hook(); + // Override the default panic handler to print the panic in JSON format. 
+ std::panic::set_hook(Box::new(json_panic_handler)); + }; + } + + pub fn into_layer(self) -> impl Layer + where + S: tracing::Subscriber + for<'span> LookupSpan<'span> + Send + Sync, + { + let filter = self.build_filter(); + let layer = match self.format { + LogFormat::Plain => layer::LogsLayer::Plain(fmt::Layer::new()), + LogFormat::Json => { + let timer = tracing_subscriber::fmt::time::UtcTime::rfc_3339(); + let json_layer = fmt::Layer::default() + .with_file(true) + .with_line_number(true) + .with_timer(timer) + .json(); + layer::LogsLayer::Json(json_layer) + } + }; + layer.with_filter(filter) + } +} + +fn json_panic_handler(panic_info: &PanicInfo) { + let backtrace = Backtrace::force_capture(); + let timestamp = chrono::Utc::now(); + let panic_message = if let Some(s) = panic_info.payload().downcast_ref::() { + s.as_str() + } else if let Some(s) = panic_info.payload().downcast_ref::<&str>() { + s + } else { + "Panic occurred without additional info" + }; + + let panic_location = panic_info + .location() + .map(|val| val.to_string()) + .unwrap_or_else(|| "Unknown location".to_owned()); + + let backtrace_str = backtrace.to_string(); + let timestamp_str = timestamp.format("%Y-%m-%dT%H:%M:%S%.fZ").to_string(); + + println!( + "{}", + serde_json::json!({ + "timestamp": timestamp_str, + "level": "CRITICAL", + "fields": { + "message": panic_message, + "location": panic_location, + "backtrace": backtrace_str, + } + }) + ); +} diff --git a/core/lib/vlog/src/opentelemetry/mod.rs b/core/lib/vlog/src/opentelemetry/mod.rs new file mode 100644 index 000000000000..64049df8ce9b --- /dev/null +++ b/core/lib/vlog/src/opentelemetry/mod.rs @@ -0,0 +1,191 @@ +use std::str::FromStr; + +use opentelemetry::{trace::TracerProvider, KeyValue}; +use opentelemetry_otlp::WithExportConfig; +use opentelemetry_sdk::{ + propagation::TraceContextPropagator, + trace::{RandomIdGenerator, Sampler}, + Resource, +}; +use opentelemetry_semantic_conventions::resource::{ + K8S_NAMESPACE_NAME, 
K8S_POD_NAME, SERVICE_NAME, +}; +use tracing_subscriber::{registry::LookupSpan, EnvFilter, Layer}; +use url::Url; + +/// Information about the service. +#[derive(Debug, Default)] +#[non_exhaustive] +pub struct ServiceDescriptor { + /// Name of the k8s pod. + pub k8s_pod_name: Option, + /// Name of the k8s namespace. + pub k8s_namespace_name: Option, + /// Name of the service. + pub service_name: Option, +} + +impl ServiceDescriptor { + pub fn new() -> Self { + Self::default() + } + + pub fn with_k8s_pod_name(mut self, k8s_pod_name: Option) -> Self { + self.k8s_pod_name = k8s_pod_name; + self + } + + pub fn with_k8s_namespace_name(mut self, k8s_namespace_name: Option) -> Self { + self.k8s_namespace_name = k8s_namespace_name; + self + } + + pub fn with_service_name(mut self, service_name: Option) -> Self { + self.service_name = service_name; + self + } + + /// Tries to fill empty fields from environment variables. + /// + /// The following environment variables are used: + /// - `POD_NAME` + /// - `POD_NAMESPACE` + /// - `SERVICE_NAME` + pub fn fill_from_env(mut self) -> Self { + if self.k8s_pod_name.is_none() { + self.k8s_pod_name = std::env::var("POD_NAME").ok(); + } + if self.k8s_namespace_name.is_none() { + self.k8s_namespace_name = std::env::var("POD_NAMESPACE").ok(); + } + if self.service_name.is_none() { + self.service_name = std::env::var("SERVICE_NAME").ok(); + } + self + } + + fn into_otlp_resource(self) -> Resource { + let mut attributes = vec![]; + if let Some(pod_name) = self.k8s_pod_name { + attributes.push(KeyValue::new(K8S_POD_NAME, pod_name)); + } + if let Some(pod_namespace) = self.k8s_namespace_name { + attributes.push(KeyValue::new(K8S_NAMESPACE_NAME, pod_namespace)); + } + if let Some(service_name) = self.service_name { + attributes.push(KeyValue::new(SERVICE_NAME, service_name)); + } + Resource::new(attributes) + } +} + +#[derive(Debug)] +pub struct OpenTelemetry { + /// Enables export of span data of specified level (and above) using 
opentelemetry exporters. + pub opentelemetry_level: OpenTelemetryLevel, + /// Opentelemetry HTTP collector endpoint. + pub otlp_endpoint: Url, + /// Information about service + pub service: Option, +} + +impl OpenTelemetry { + pub fn new( + opentelemetry_level: &str, + otlp_endpoint: String, + ) -> Result { + Ok(Self { + opentelemetry_level: opentelemetry_level.parse()?, + otlp_endpoint: otlp_endpoint + .parse() + .map_err(|e| OpenTelemetryLayerError::InvalidUrl(otlp_endpoint, e))?, + service: None, + }) + } + + pub fn with_service_descriptor(mut self, service: ServiceDescriptor) -> Self { + self.service = Some(service); + self + } + + pub(super) fn into_layer(self) -> impl Layer + where + S: tracing::Subscriber + for<'span> LookupSpan<'span> + Send + Sync, + { + let filter = match self.opentelemetry_level { + OpenTelemetryLevel::OFF => EnvFilter::new("off"), + OpenTelemetryLevel::INFO => EnvFilter::new("info"), + OpenTelemetryLevel::DEBUG => EnvFilter::new("debug"), + OpenTelemetryLevel::TRACE => EnvFilter::new("trace"), + }; + // `otel::tracing` should be a level info to emit opentelemetry trace & span + // `otel` set to debug to log detected resources, configuration read and inferred + let filter = filter + .add_directive("otel::tracing=trace".parse().unwrap()) + .add_directive("otel=debug".parse().unwrap()); + + let service = self.service.unwrap_or_default().fill_from_env(); + let service_name = service + .service_name + .clone() + .unwrap_or_else(|| "zksync_vlog".to_string()); + let resource = service.into_otlp_resource(); + + let exporter = opentelemetry_otlp::new_exporter() + .http() + .with_endpoint(self.otlp_endpoint) + .build_span_exporter() + .expect("Failed to create OTLP exporter"); // URL is validated. 
+ + let config = opentelemetry_sdk::trace::Config::default() + .with_id_generator(RandomIdGenerator::default()) + .with_sampler(Sampler::AlwaysOn) + .with_resource(resource); + + let provider = opentelemetry_sdk::trace::TracerProvider::builder() + .with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio) + .with_config(config) + .build(); + + // TODO: Version and other metadata + let tracer = provider.tracer_builder(service_name).build(); + + opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new()); + tracing_opentelemetry::layer() + .with_tracer(tracer) + .with_filter(filter) + } +} + +// Doesn't define WARN and ERROR, because the highest verbosity of spans is INFO. +#[derive(Copy, Clone, Debug, Default)] +pub enum OpenTelemetryLevel { + #[default] + OFF, + INFO, + DEBUG, + TRACE, +} + +#[derive(Debug, thiserror::Error)] +#[non_exhaustive] +pub enum OpenTelemetryLayerError { + #[error("Invalid OpenTelemetry level format")] + InvalidFormat, + #[error("Invalid URL: \"{0}\" - {1}")] + InvalidUrl(String, url::ParseError), +} + +impl FromStr for OpenTelemetryLevel { + type Err = OpenTelemetryLayerError; + + fn from_str(s: &str) -> Result { + match s { + "off" => Ok(OpenTelemetryLevel::OFF), + "info" => Ok(OpenTelemetryLevel::INFO), + "debug" => Ok(OpenTelemetryLevel::DEBUG), + "trace" => Ok(OpenTelemetryLevel::TRACE), + _ => Err(OpenTelemetryLayerError::InvalidFormat), + } + } +} diff --git a/core/lib/vlog/src/sentry.rs b/core/lib/vlog/src/sentry.rs new file mode 100644 index 000000000000..5102efd9a1a7 --- /dev/null +++ b/core/lib/vlog/src/sentry.rs @@ -0,0 +1,38 @@ +use std::borrow::Cow; + +// Temporary re-export of `sentry::capture_message` aiming to simplify the transition from `vlog` to using +// crates directly. 
+pub use sentry::{capture_message, Level as AlertLevel}; +use sentry::{types::Dsn, ClientInitGuard}; + +#[derive(Debug)] +pub struct Sentry { + url: Dsn, + environment: Option, +} + +impl Sentry { + pub fn new(url: &str) -> Result { + Ok(Self { + url: url.parse()?, + environment: None, + }) + } + + pub fn with_environment(mut self, environment: Option) -> Self { + self.environment = environment; + self + } + + pub fn install(self) -> ClientInitGuard { + // Initialize the Sentry. + let options = sentry::ClientOptions { + release: sentry::release_name!(), + environment: self.environment.map(Cow::from), + attach_stacktrace: true, + ..Default::default() + }; + + sentry::init((self.url, options)) + } +} diff --git a/core/node/node_framework/examples/main_node.rs b/core/node/node_framework/examples/main_node.rs deleted file mode 100644 index 38f989bda85f..000000000000 --- a/core/node/node_framework/examples/main_node.rs +++ /dev/null @@ -1,419 +0,0 @@ -//! An incomplete example of how node initialization looks like. -//! This example defines a `ResourceProvider` that works using the main node env config, and -//! initializes a single task with a health check server. 
- -use anyhow::Context; -use zksync_config::{ - configs::{ - chain::{ - CircuitBreakerConfig, MempoolConfig, NetworkConfig, OperationsManagerConfig, - StateKeeperConfig, - }, - fri_prover_group::FriProverGroupConfig, - house_keeper::HouseKeeperConfig, - wallets::Wallets, - DatabaseSecrets, FriProofCompressorConfig, FriProverConfig, FriWitnessGeneratorConfig, - L1Secrets, ObservabilityConfig, ProofDataHandlerConfig, - }, - ApiConfig, ContractVerifierConfig, ContractsConfig, DBConfig, EthConfig, EthWatchConfig, - GasAdjusterConfig, GenesisConfig, ObjectStoreConfig, PostgresConfig, -}; -use zksync_env_config::FromEnv; -use zksync_metadata_calculator::MetadataCalculatorConfig; -use zksync_node_api_server::{ - tx_sender::{ApiContracts, TxSenderConfig}, - web3::{state::InternalApiConfig, Namespace}, -}; -use zksync_node_framework::{ - implementations::layers::{ - circuit_breaker_checker::CircuitBreakerCheckerLayer, - commitment_generator::CommitmentGeneratorLayer, - contract_verification_api::ContractVerificationApiLayer, - eth_sender::{EthTxAggregatorLayer, EthTxManagerLayer}, - eth_watch::EthWatchLayer, - healtcheck_server::HealthCheckLayer, - house_keeper::HouseKeeperLayer, - l1_gas::SequencerL1GasLayer, - metadata_calculator::MetadataCalculatorLayer, - object_store::ObjectStoreLayer, - pk_signing_eth_client::PKSigningEthClientLayer, - pools_layer::PoolsLayerBuilder, - proof_data_handler::ProofDataHandlerLayer, - query_eth_client::QueryEthClientLayer, - sigint::SigintHandlerLayer, - state_keeper::{ - main_batch_executor::MainBatchExecutorLayer, mempool_io::MempoolIOLayer, - output_handler::OutputHandlerLayer, StateKeeperLayer, - }, - web3_api::{ - caches::MempoolCacheLayer, - server::{Web3ServerLayer, Web3ServerOptionalConfig}, - tree_api_client::TreeApiClientLayer, - tx_sender::{PostgresStorageCachesConfig, TxSenderLayer}, - tx_sink::MasterPoolSinkLayer, - }, - }, - service::{ZkStackService, ZkStackServiceBuilder, ZkStackServiceError}, -}; -use 
zksync_state::RocksdbStorageOptions; - -struct MainNodeBuilder { - node: ZkStackServiceBuilder, -} - -impl MainNodeBuilder { - fn new() -> Self { - Self { - node: ZkStackServiceBuilder::new(), - } - } - - fn add_sigint_handler_layer(mut self) -> anyhow::Result { - self.node.add_layer(SigintHandlerLayer); - Ok(self) - } - - fn add_pools_layer(mut self) -> anyhow::Result { - let config = PostgresConfig::from_env()?; - let secrets = DatabaseSecrets::from_env()?; - let pools_layer = PoolsLayerBuilder::empty(config, secrets) - .with_master(true) - .with_replica(true) - .with_prover(true) - .build(); - self.node.add_layer(pools_layer); - Ok(self) - } - - fn add_pk_signing_client_layer(mut self) -> anyhow::Result { - let genesis = GenesisConfig::from_env()?; - let eth_config = EthConfig::from_env()?; - let wallets = Wallets::from_env()?; - self.node.add_layer(PKSigningEthClientLayer::new( - eth_config, - ContractsConfig::from_env()?, - genesis.l1_chain_id, - wallets.eth_sender.context("Eth sender configs")?, - )); - Ok(self) - } - - fn add_query_eth_client_layer(mut self) -> anyhow::Result { - let genesis = GenesisConfig::from_env()?; - let eth_config = L1Secrets::from_env()?; - let query_eth_client_layer = - QueryEthClientLayer::new(genesis.l1_chain_id, eth_config.l1_rpc_url); - self.node.add_layer(query_eth_client_layer); - Ok(self) - } - - fn add_sequencer_l1_gas_layer(mut self) -> anyhow::Result { - let gas_adjuster_config = GasAdjusterConfig::from_env()?; - let state_keeper_config = StateKeeperConfig::from_env()?; - let genesis_config = GenesisConfig::from_env()?; - let eth_sender_config = EthConfig::from_env()?; - let sequencer_l1_gas_layer = SequencerL1GasLayer::new( - gas_adjuster_config, - genesis_config, - state_keeper_config, - eth_sender_config - .sender - .context("eth_sender")? 
- .pubdata_sending_mode, - ); - self.node.add_layer(sequencer_l1_gas_layer); - Ok(self) - } - - fn add_object_store_layer(mut self) -> anyhow::Result { - let object_store_config = ObjectStoreConfig::from_env()?; - self.node - .add_layer(ObjectStoreLayer::new(object_store_config)); - Ok(self) - } - - fn add_metadata_calculator_layer(mut self) -> anyhow::Result { - let merkle_tree_env_config = DBConfig::from_env()?.merkle_tree; - let operations_manager_env_config = OperationsManagerConfig::from_env()?; - let state_keeper_env_config = StateKeeperConfig::from_env()?; - let metadata_calculator_config = MetadataCalculatorConfig::for_main_node( - &merkle_tree_env_config, - &operations_manager_env_config, - &state_keeper_env_config, - ); - self.node - .add_layer(MetadataCalculatorLayer::new(metadata_calculator_config)); - Ok(self) - } - - fn add_state_keeper_layer(mut self) -> anyhow::Result { - let wallets = Wallets::from_env()?; - let contracts_config = ContractsConfig::from_env()?; - let sk_config = StateKeeperConfig::from_env()?; - let persisence_layer = OutputHandlerLayer::new( - contracts_config.l2_shared_bridge_addr.unwrap(), - sk_config.l2_block_seal_queue_capacity, - ); - let mempool_io_layer = MempoolIOLayer::new( - NetworkConfig::from_env()?.zksync_network_id, - sk_config, - MempoolConfig::from_env()?, - wallets.state_keeper.context("State keeper wallets")?, - ); - let main_node_batch_executor_builder_layer = - MainBatchExecutorLayer::new(StateKeeperConfig::from_env()?.save_call_traces, true); - let db_config = DBConfig::from_env()?; - - let rocksdb_options = RocksdbStorageOptions { - block_cache_capacity: db_config - .experimental - .state_keeper_db_block_cache_capacity(), - max_open_files: db_config.experimental.state_keeper_db_max_open_files, - }; - let state_keeper_layer = - StateKeeperLayer::new(db_config.state_keeper_db_path, rocksdb_options); - self.node - .add_layer(persisence_layer) - .add_layer(mempool_io_layer) - 
.add_layer(main_node_batch_executor_builder_layer) - .add_layer(state_keeper_layer); - Ok(self) - } - - fn add_eth_watch_layer(mut self) -> anyhow::Result { - self.node.add_layer(EthWatchLayer::new( - EthWatchConfig::from_env()?, - ContractsConfig::from_env()?, - )); - Ok(self) - } - - fn add_proof_data_handler_layer(mut self) -> anyhow::Result { - let genesis_config = GenesisConfig::from_env()?; - self.node.add_layer(ProofDataHandlerLayer::new( - ProofDataHandlerConfig::from_env()?, - genesis_config.l1_batch_commit_data_generator_mode, - )); - Ok(self) - } - - fn add_healthcheck_layer(mut self) -> anyhow::Result { - let healthcheck_config = ApiConfig::from_env()?.healthcheck; - self.node.add_layer(HealthCheckLayer(healthcheck_config)); - Ok(self) - } - - fn add_tx_sender_layer(mut self) -> anyhow::Result { - let state_keeper_config = StateKeeperConfig::from_env()?; - let rpc_config = ApiConfig::from_env()?.web3_json_rpc; - let network_config = NetworkConfig::from_env()?; - let postgres_storage_caches_config = PostgresStorageCachesConfig { - factory_deps_cache_size: rpc_config.factory_deps_cache_size() as u64, - initial_writes_cache_size: rpc_config.initial_writes_cache_size() as u64, - latest_values_cache_size: rpc_config.latest_values_cache_size() as u64, - }; - let wallets = Wallets::from_env()?; - - // On main node we always use master pool sink. - self.node.add_layer(MasterPoolSinkLayer); - self.node.add_layer(TxSenderLayer::new( - TxSenderConfig::new( - &state_keeper_config, - &rpc_config, - wallets - .state_keeper - .context("StateKeeper wallets")? 
- .fee_account - .address(), - network_config.zksync_network_id, - ), - postgres_storage_caches_config, - rpc_config.vm_concurrency_limit(), - ApiContracts::load_from_disk_blocking(), // TODO (BFT-138): Allow to dynamically reload API contracts - )); - Ok(self) - } - - fn add_api_caches_layer(mut self) -> anyhow::Result { - let rpc_config = ApiConfig::from_env()?.web3_json_rpc; - self.node.add_layer(MempoolCacheLayer::new( - rpc_config.mempool_cache_size(), - rpc_config.mempool_cache_update_interval(), - )); - Ok(self) - } - - fn add_tree_api_client_layer(mut self) -> anyhow::Result { - let rpc_config = ApiConfig::from_env()?.web3_json_rpc; - self.node - .add_layer(TreeApiClientLayer::http(rpc_config.tree_api_url)); - Ok(self) - } - - fn add_http_web3_api_layer(mut self) -> anyhow::Result { - let rpc_config = ApiConfig::from_env()?.web3_json_rpc; - let contracts_config = ContractsConfig::from_env()?; - let state_keeper_config = StateKeeperConfig::from_env()?; - let with_debug_namespace = state_keeper_config.save_call_traces; - let genesis_config = GenesisConfig::from_env()?; - - let mut namespaces = Namespace::DEFAULT.to_vec(); - if with_debug_namespace { - namespaces.push(Namespace::Debug) - } - namespaces.push(Namespace::Snapshots); - - let optional_config = Web3ServerOptionalConfig { - namespaces: Some(namespaces), - filters_limit: Some(rpc_config.filters_limit()), - subscriptions_limit: Some(rpc_config.subscriptions_limit()), - batch_request_size_limit: Some(rpc_config.max_batch_request_size()), - response_body_size_limit: Some(rpc_config.max_response_body_size()), - ..Default::default() - }; - self.node.add_layer(Web3ServerLayer::http( - rpc_config.http_port, - InternalApiConfig::new(&rpc_config, &contracts_config, &genesis_config), - optional_config, - )); - - Ok(self) - } - - fn add_ws_web3_api_layer(mut self) -> anyhow::Result { - let rpc_config = ApiConfig::from_env()?.web3_json_rpc; - let contracts_config = ContractsConfig::from_env()?; - let 
genesis_config = GenesisConfig::from_env()?; - let state_keeper_config = StateKeeperConfig::from_env()?; - let circuit_breaker_config = CircuitBreakerConfig::from_env()?; - let with_debug_namespace = state_keeper_config.save_call_traces; - - let mut namespaces = Namespace::DEFAULT.to_vec(); - if with_debug_namespace { - namespaces.push(Namespace::Debug) - } - namespaces.push(Namespace::Snapshots); - - let optional_config = Web3ServerOptionalConfig { - namespaces: Some(namespaces), - filters_limit: Some(rpc_config.filters_limit()), - subscriptions_limit: Some(rpc_config.subscriptions_limit()), - batch_request_size_limit: Some(rpc_config.max_batch_request_size()), - response_body_size_limit: Some(rpc_config.max_response_body_size()), - websocket_requests_per_minute_limit: Some( - rpc_config.websocket_requests_per_minute_limit(), - ), - replication_lag_limit: circuit_breaker_config.replication_lag_limit(), - with_extended_tracing: rpc_config.extended_api_tracing, - ..Default::default() - }; - self.node.add_layer(Web3ServerLayer::ws( - rpc_config.ws_port, - InternalApiConfig::new(&rpc_config, &contracts_config, &genesis_config), - optional_config, - )); - - Ok(self) - } - fn add_eth_sender_layer(mut self) -> anyhow::Result { - let eth_sender_config = EthConfig::from_env()?; - let contracts_config = ContractsConfig::from_env()?; - let network_config = NetworkConfig::from_env()?; - let genesis_config = GenesisConfig::from_env()?; - - self.node.add_layer(EthTxAggregatorLayer::new( - eth_sender_config.clone(), - contracts_config, - network_config.zksync_network_id, - genesis_config.l1_batch_commit_data_generator_mode, - )); - self.node - .add_layer(EthTxManagerLayer::new(eth_sender_config)); - - Ok(self) - } - - fn add_house_keeper_layer(mut self) -> anyhow::Result { - let house_keeper_config = HouseKeeperConfig::from_env()?; - let fri_prover_config = FriProverConfig::from_env()?; - let fri_witness_generator_config = FriWitnessGeneratorConfig::from_env()?; - let 
fri_prover_group_config = FriProverGroupConfig::from_env()?; - let fri_proof_compressor_config = FriProofCompressorConfig::from_env()?; - - self.node.add_layer(HouseKeeperLayer::new( - house_keeper_config, - fri_prover_config, - fri_witness_generator_config, - fri_prover_group_config, - fri_proof_compressor_config, - )); - - Ok(self) - } - - fn add_commitment_generator_layer(mut self) -> anyhow::Result { - let genesis = GenesisConfig::from_env()?; - self.node.add_layer(CommitmentGeneratorLayer::new( - genesis.l1_batch_commit_data_generator_mode, - )); - - Ok(self) - } - - fn add_circuit_breaker_checker_layer(mut self) -> anyhow::Result { - let circuit_breaker_config = CircuitBreakerConfig::from_env()?; - self.node - .add_layer(CircuitBreakerCheckerLayer(circuit_breaker_config)); - - Ok(self) - } - - fn add_contract_verification_api_layer(mut self) -> anyhow::Result { - let config = ContractVerifierConfig::from_env()?; - self.node.add_layer(ContractVerificationApiLayer(config)); - Ok(self) - } - - fn build(mut self) -> Result { - self.node.build() - } -} - -fn main() -> anyhow::Result<()> { - let observability_config = - ObservabilityConfig::from_env().context("ObservabilityConfig::from_env()")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - let _guard = zksync_vlog::ObservabilityBuilder::new() - .with_log_format(log_format) - .build(); - - MainNodeBuilder::new() - .add_sigint_handler_layer()? - .add_pools_layer()? - .add_circuit_breaker_checker_layer()? - .add_query_eth_client_layer()? - .add_sequencer_l1_gas_layer()? - .add_object_store_layer()? - .add_metadata_calculator_layer()? - .add_state_keeper_layer()? - .add_eth_watch_layer()? - .add_pk_signing_client_layer()? - .add_eth_sender_layer()? - .add_proof_data_handler_layer()? - .add_healthcheck_layer()? - .add_tx_sender_layer()? - .add_tree_api_client_layer()? - .add_api_caches_layer()? - .add_http_web3_api_layer()? 
- .add_ws_web3_api_layer()? - .add_house_keeper_layer()? - .add_commitment_generator_layer()? - .add_contract_verification_api_layer()? - .build()? - .run()?; - - Ok(()) -} diff --git a/core/node/node_framework/examples/showcase.rs b/core/node/node_framework/examples/showcase.rs index 3dbb576c1935..1cdc5e2f5a19 100644 --- a/core/node/node_framework/examples/showcase.rs +++ b/core/node/node_framework/examples/showcase.rs @@ -251,10 +251,10 @@ impl WiringLayer for TasksLayer { } fn main() -> anyhow::Result<()> { - ZkStackServiceBuilder::new() - .add_layer(DatabaseLayer) - .add_layer(TasksLayer) - .build()? - .run()?; + let mut builder = ZkStackServiceBuilder::new()?; + + builder.add_layer(DatabaseLayer).add_layer(TasksLayer); + + builder.build().run()?; Ok(()) } diff --git a/core/node/node_framework/src/service/mod.rs b/core/node/node_framework/src/service/mod.rs index 22102a60efb7..f5019be01a79 100644 --- a/core/node/node_framework/src/service/mod.rs +++ b/core/node/node_framework/src/service/mod.rs @@ -36,17 +36,37 @@ mod tests; const TASK_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(30); /// A builder for [`ZkStackService`]. -#[derive(Default, Debug)] +#[derive(Debug)] pub struct ZkStackServiceBuilder { /// List of wiring layers. // Note: It has to be a `Vec` and not e.g. `HashMap` because the order in which we // iterate through it matters. layers: Vec<(&'static str, WireFn)>, + /// Tokio runtime used to spawn tasks. + runtime: Runtime, } impl ZkStackServiceBuilder { - pub fn new() -> Self { - Self { layers: Vec::new() } + /// Creates a new builder. + /// + /// Returns an error if called within a Tokio runtime context. 
+ pub fn new() -> Result { + if tokio::runtime::Handle::try_current().is_ok() { + return Err(ZkStackServiceError::RuntimeDetected); + } + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .unwrap(); + Ok(Self { + layers: Vec::new(), + runtime, + }) + } + + /// Returns a handle to the Tokio runtime used by the service. + pub fn runtime_handle(&self) -> tokio::runtime::Handle { + self.runtime.handle().clone() } /// Adds a wiring layer. @@ -71,28 +91,17 @@ impl ZkStackServiceBuilder { } /// Builds the service. - /// - /// In case of errors during wiring phase, will return the list of all the errors that happened, in the order - /// of their occurrence. - pub fn build(&mut self) -> Result { - if tokio::runtime::Handle::try_current().is_ok() { - return Err(ZkStackServiceError::RuntimeDetected); - } - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build() - .unwrap(); - + pub fn build(self) -> ZkStackService { let (stop_sender, _stop_receiver) = watch::channel(false); - Ok(ZkStackService { - layers: std::mem::take(&mut self.layers), + ZkStackService { + layers: self.layers, resources: Default::default(), runnables: Default::default(), stop_sender, - runtime, + runtime: self.runtime, errors: Vec::new(), - }) + } } } @@ -120,6 +129,9 @@ type TaskFuture = NamedFuture>>>; impl ZkStackService { /// Runs the system. + /// + /// In case of errors during wiring phase, will return the list of all the errors that happened, in the order + /// of their occurrence. 
pub fn run(mut self) -> Result<(), ZkStackServiceError> { self.wire()?; diff --git a/core/node/node_framework/src/service/tests.rs b/core/node/node_framework/src/service/tests.rs index e801e97b7e96..4fae955201d9 100644 --- a/core/node/node_framework/src/service/tests.rs +++ b/core/node/node_framework/src/service/tests.rs @@ -16,7 +16,7 @@ fn test_new_with_nested_runtime() { let runtime = Runtime::new().unwrap(); let initialization_result = - runtime.block_on(async { ZkStackServiceBuilder::new().build().unwrap_err() }); + runtime.block_on(async { ZkStackServiceBuilder::new().unwrap_err() }); assert_matches!(initialization_result, ZkStackServiceError::RuntimeDetected); } @@ -43,7 +43,7 @@ impl WiringLayer for DefaultLayer { // `add_layer` should add multiple layers. #[test] fn test_add_layer() { - let mut zk_stack_service = ZkStackServiceBuilder::new(); + let mut zk_stack_service = ZkStackServiceBuilder::new().unwrap(); zk_stack_service .add_layer(DefaultLayer { name: "first_layer", @@ -61,7 +61,7 @@ fn test_add_layer() { // `add_layer` should ignore already added layers. #[test] fn test_layers_are_unique() { - let mut zk_stack_service = ZkStackServiceBuilder::new(); + let mut zk_stack_service = ZkStackServiceBuilder::new().unwrap(); zk_stack_service .add_layer(DefaultLayer { name: "default_layer", @@ -79,7 +79,7 @@ fn test_layers_are_unique() { // `ZkStack` Service's `run()` method has to return error if there is no tasks added. #[test] fn test_run_with_no_tasks() { - let empty_run_result = ZkStackServiceBuilder::new().build().unwrap().run(); + let empty_run_result = ZkStackServiceBuilder::new().unwrap().build().run(); assert_matches!(empty_run_result.unwrap_err(), ZkStackServiceError::NoTasks); } @@ -103,10 +103,10 @@ impl WiringLayer for WireErrorLayer { // `ZkStack` Service's `run()` method has to take into account errors on wiring step. 
#[test] fn test_run_with_error_tasks() { - let mut zk_stack_service = ZkStackServiceBuilder::new(); + let mut zk_stack_service = ZkStackServiceBuilder::new().unwrap(); let error_layer = WireErrorLayer; zk_stack_service.add_layer(error_layer); - let result = zk_stack_service.build().unwrap().run(); + let result = zk_stack_service.build().run(); assert_matches!(result.unwrap_err(), ZkStackServiceError::Wiring(_)); } @@ -151,9 +151,9 @@ impl Task for ErrorTask { // `ZkStack` Service's `run()` method has to take into account errors inside task execution. #[test] fn test_run_with_failed_tasks() { - let mut zk_stack_service: ZkStackServiceBuilder = ZkStackServiceBuilder::new(); + let mut zk_stack_service: ZkStackServiceBuilder = ZkStackServiceBuilder::new().unwrap(); zk_stack_service.add_layer(TaskErrorLayer); - let result = zk_stack_service.build().unwrap().run(); + let result = zk_stack_service.build().run(); assert_matches!(result.unwrap_err(), ZkStackServiceError::Task(_)); } @@ -235,7 +235,7 @@ fn test_task_run() { let successful_task_was_run = Arc::new(Mutex::new(false)); let remaining_task_was_run = Arc::new(Mutex::new(false)); - let mut zk_stack_service = ZkStackServiceBuilder::new(); + let mut zk_stack_service = ZkStackServiceBuilder::new().unwrap(); zk_stack_service.add_layer(TasksLayer { successful_task_was_run: successful_task_was_run.clone(), @@ -243,7 +243,7 @@ fn test_task_run() { }); assert!( - zk_stack_service.build().unwrap().run().is_ok(), + zk_stack_service.build().run().is_ok(), "ZkStackServiceBuilder run finished with an error, but it shouldn't" ); let res1 = *successful_task_was_run.lock().unwrap(); diff --git a/core/tests/loadnext/src/main.rs b/core/tests/loadnext/src/main.rs index 7ba6e762ea26..81185cc1c63e 100644 --- a/core/tests/loadnext/src/main.rs +++ b/core/tests/loadnext/src/main.rs @@ -6,6 +6,7 @@ use std::time::Duration; +use anyhow::Context as _; use loadnext::{ command::TxType, config::{ExecutionConfig, LoadtestConfig}, @@ -21,7 +22,7 
@@ async fn main() -> anyhow::Result<()> { // We don't want to introduce dependency on `zksync_env_config` in loadnext, // but we historically rely on the environment variables for the observability configuration, // so we load them directly here. - let log_format: zksync_vlog::LogFormat = std::env::var("MISC_LOG_FORMAT") + let log_format: zksync_vlog::logs::LogFormat = std::env::var("MISC_LOG_FORMAT") .ok() .unwrap_or("plain".to_string()) .parse()?; @@ -39,14 +40,20 @@ async fn main() -> anyhow::Result<()> { } }; - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = sentry_url { - builder = builder - .with_sentry_url(&sentry_url) - .expect("Invalid Sentry URL") - .with_sentry_environment(environment); - } - let _guard = builder.build(); + let logs = zksync_vlog::Logs::from(log_format); + let sentry = sentry_url + .map(|url| { + anyhow::Ok( + zksync_vlog::Sentry::new(&url) + .context("Invalid Sentry URL")? + .with_environment(environment), + ) + }) + .transpose()?; + let _guard = zksync_vlog::ObservabilityBuilder::new() + .with_logs(Some(logs)) + .with_sentry(sentry) + .build(); let config = LoadtestConfig::from_env() .expect("Config parameters should be loaded from env or from default values"); diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 7914ece95c70..34ca63e4a99c 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -327,14 +327,15 @@ prometheus: observability: log_format: plain log_directives: 
"zksync_node_test_utils=info,zksync_state_keeper=info,zksync_reorg_detector=info,zksync_consistency_checker=info,zksync_metadata_calculator=info,zksync_node_sync=info,zksync_node_consensus=info,zksync_contract_verification_server=info,zksync_node_api_server=info,zksync_tee_verifier_input_producer=info,zksync_node_framework=info,zksync_block_reverter=info,zksync_commitment_generator=info,zksync_node_db_pruner=info,zksync_eth_sender=info,zksync_node_fee_model=info,zksync_node_genesis=info,zksync_house_keeper=info,zksync_proof_data_handler=info,zksync_shared_metrics=info,zksync_node_test_utils=info,zksync_vm_runner=info,zksync_consensus_bft=info,zksync_consensus_network=info,zksync_consensus_storage=info,zksync_core_leftovers=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=info,zksync_db_connection=info,zksync_eth_client=info,zksync_eth_watch=debug,zksync_storage=info,zksync_db_manager=info,zksync_merkle_tree=info,zksync_state=debug,zksync_utils=debug,zksync_queued_job_processor=info,zksync_types=info,zksync_mempool=debug,loadnext=info,vm=info,zksync_object_store=info,zksync_external_node=info,zksync_witness_generator=info,zksync_prover_fri=info,zksync_witness_vector_generator=info,zksync_web3_decl=debug,zksync_health_check=debug,zksync_proof_fri_compressor=info,vise_exporter=error,snapshots_creator=debug,zksync_base_token_adjuster=debug,zksync_external_price_api=debug" - sentry: - url: unset - panic_interval: 1800 - error_interval: 10800 - environment: localhost - opentelemetry: - endpoint: unset - level: debug + # Uncomment only if needed + # sentry: + # url: unset + # panic_interval: 1800 + # error_interval: 10800 + # environment: localhost + # opentelemetry: + # endpoint: unset + # level: debug protective_reads_writer: db_path: "./db/main/protective_reads" diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 6be3ab0db106..0afe435859d6 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -288,18 +288,17 @@ dependencies = [ [[package]] 
name = "axum" -version = "0.6.20" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", "axum-core", - "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.29", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", "itoa", "matchit", "memchr", @@ -308,7 +307,7 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.1", "tower", "tower-layer", "tower-service", @@ -316,17 +315,20 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.3.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", "mime", + "pin-project-lite", "rustversion", + "sync_wrapper 0.1.2", "tower-layer", "tower-service", ] @@ -2726,6 +2728,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "httparse", + "httpdate", "itoa", "pin-project-lite", "smallvec", @@ -2753,14 +2756,15 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" dependencies = [ - "hyper 0.14.29", + "hyper 1.3.1", + "hyper-util", "pin-project-lite", "tokio", - "tokio-io-timeout", + "tower-service", ] [[package]] @@ -3890,43 +3894,46 @@ dependencies = [ [[package]] name = "opentelemetry" -version = "0.20.0" +version = 
"0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9591d937bc0e6d2feb6f71a559540ab300ea49955229c347a517a28d27784c54" +checksum = "4c365a63eec4f55b7efeceb724f1336f26a9cf3427b70e59e2cd2a5b947fba96" dependencies = [ - "opentelemetry_api", - "opentelemetry_sdk", + "futures-core", + "futures-sink", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", ] [[package]] name = "opentelemetry-http" -version = "0.9.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" +checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab" dependencies = [ "async-trait", "bytes", - "http 0.2.12", - "opentelemetry_api", - "reqwest 0.11.27", + "http 1.1.0", + "opentelemetry", + "reqwest 0.12.5", ] [[package]] name = "opentelemetry-otlp" -version = "0.13.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" +checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727" dependencies = [ "async-trait", "futures-core", - "http 0.2.12", + "http 1.1.0", + "opentelemetry", "opentelemetry-http", "opentelemetry-proto", - "opentelemetry-semantic-conventions", - "opentelemetry_api", "opentelemetry_sdk", - "prost 0.11.9", - "reqwest 0.11.27", + "prost 0.13.1", + "reqwest 0.12.5", "thiserror", "tokio", "tonic", @@ -3934,58 +3941,37 @@ dependencies = [ [[package]] name = "opentelemetry-proto" -version = "0.3.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1e3f814aa9f8c905d0ee4bde026afd3b2577a97c10e1699912e3e44f0c4cbeb" +checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9" dependencies = [ - "opentelemetry_api", + "opentelemetry", "opentelemetry_sdk", - "prost 0.11.9", + "prost 0.13.1", "tonic", ] [[package]] name = 
"opentelemetry-semantic-conventions" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73c9f9340ad135068800e7f1b24e9e09ed9e7143f5bf8518ded3d3ec69789269" -dependencies = [ - "opentelemetry", -] - -[[package]] -name = "opentelemetry_api" -version = "0.20.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a81f725323db1b1206ca3da8bb19874bbd3f57c3bcd59471bfb04525b265b9b" -dependencies = [ - "futures-channel", - "futures-util", - "indexmap 1.9.3", - "js-sys", - "once_cell", - "pin-project-lite", - "thiserror", - "urlencoding", -] +checksum = "1cefe0543875379e47eb5f1e68ff83f45cc41366a92dfd0d073d513bf68e9a05" [[package]] name = "opentelemetry_sdk" -version = "0.20.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8e705a0612d48139799fcbaba0d4a90f06277153e43dd2bdc16c6f0edd8026" +checksum = "692eac490ec80f24a17828d49b40b60f5aeaccdfe6a503f939713afd22bc28df" dependencies = [ "async-trait", - "crossbeam-channel 0.5.13", "futures-channel", "futures-executor", "futures-util", + "glob", "once_cell", - "opentelemetry_api", - "ordered-float 3.9.2", + "opentelemetry", "percent-encoding", "rand 0.8.5", - "regex", "serde_json", "thiserror", "tokio", @@ -4001,15 +3987,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "ordered-float" -version = "3.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" -dependencies = [ - "num-traits", -] - [[package]] name = "os_info" version = "3.8.2" @@ -4382,22 +4359,22 @@ dependencies = [ [[package]] name = "prost" -version = "0.11.9" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", 
- "prost-derive 0.11.9", + "prost-derive 0.12.6", ] [[package]] name = "prost" -version = "0.12.6" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc" dependencies = [ "bytes", - "prost-derive 0.12.6", + "prost-derive 0.13.1", ] [[package]] @@ -4423,25 +4400,25 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.11.9" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.12.1", "proc-macro2 1.0.85", "quote 1.0.36", - "syn 1.0.109", + "syn 2.0.66", ] [[package]] name = "prost-derive" -version = "0.12.6" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" dependencies = [ "anyhow", - "itertools 0.12.1", + "itertools 0.13.0", "proc-macro2 1.0.85", "quote 1.0.36", "syn 2.0.66", @@ -5434,7 +5411,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ - "ordered-float 2.10.1", + "ordered-float", "serde", ] @@ -6356,16 +6333,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", -] - [[package]] name = "tokio-macros" version = "2.3.0" @@ -6475,24 +6442,26 @@ 
dependencies = [ [[package]] name = "tonic" -version = "0.9.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" +checksum = "38659f4a91aba8598d27821589f5db7dddd94601e7a01b1e485a50e5484c7401" dependencies = [ + "async-stream", "async-trait", "axum", - "base64 0.21.7", + "base64 0.22.1", "bytes", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.29", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.3.1", "hyper-timeout", + "hyper-util", "percent-encoding", "pin-project", - "prost 0.11.9", + "prost 0.13.1", + "socket2", "tokio", "tokio-stream", "tower", @@ -6566,17 +6535,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -6590,18 +6548,20 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.21.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75327c6b667828ddc28f5e3f169036cb793c3f588d83bf0f262a7f062ffed3c8" +checksum = "a9784ed4da7d921bc8df6963f8c80a0e4ce34ba6ba76668acadd3edbd985ff3b" dependencies = [ + "js-sys", "once_cell", "opentelemetry", "opentelemetry_sdk", "smallvec", "tracing", "tracing-core", - "tracing-log 0.1.4", + "tracing-log", "tracing-subscriber", + "web-time", ] [[package]] @@ -6632,7 +6592,7 @@ dependencies = [ "time", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", "tracing-serde", ] @@ -7044,6 +7004,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-roots" version = "0.26.2" @@ -7758,6 +7728,7 @@ dependencies = [ "zksync_concurrency", "zksync_consensus_utils", "zksync_crypto_primitives", + "zksync_vlog", ] [[package]] @@ -8123,6 +8094,7 @@ dependencies = [ "vk_setup_data_generator_server_fri", "zkevm_test_harness 0.150.2-rc.1", "zksync-wrapper-prover", + "zksync_config", "zksync_core_leftovers", "zksync_env_config", "zksync_object_store", @@ -8432,13 +8404,17 @@ dependencies = [ "opentelemetry", "opentelemetry-otlp", "opentelemetry-semantic-conventions", + "opentelemetry_sdk", "sentry", "serde", "serde_json", + "thiserror", + "time", "tokio", "tracing", "tracing-opentelemetry", "tracing-subscriber", + "url", "vise", "vise-exporter", ] diff --git a/prover/crates/bin/proof_fri_compressor/Cargo.toml b/prover/crates/bin/proof_fri_compressor/Cargo.toml index 0c01a40874f2..a44244c97b57 100644 --- a/prover/crates/bin/proof_fri_compressor/Cargo.toml +++ b/prover/crates/bin/proof_fri_compressor/Cargo.toml @@ -13,6 +13,7 @@ categories.workspace = true vise.workspace = true zksync_types.workspace = true zksync_prover_dal.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_object_store.workspace = true zksync_prover_interface.workspace = true diff --git a/prover/crates/bin/proof_fri_compressor/src/main.rs b/prover/crates/bin/proof_fri_compressor/src/main.rs index 8be498be5e00..a1a8ac90253e 100644 --- a/prover/crates/bin/proof_fri_compressor/src/main.rs +++ b/prover/crates/bin/proof_fri_compressor/src/main.rs @@ -47,28 +47,7 @@ async fn main() -> anyhow::Result<()> { .observability .expect("observability config") .clone(); - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - - let mut builder = 
zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .expect("Invalid Sentry URL") - .with_sentry_environment(observability_config.sentry_environment); - } - if let Some(opentelemetry) = observability_config.opentelemetry { - builder = builder - .with_opentelemetry( - &opentelemetry.level, - opentelemetry.endpoint, - "zksync-prover-fri-compressor".into(), - ) - .expect("Invalid OpenTelemetry config"); - } - let _guard = builder.build(); + let _observability_guard = observability_config.install()?; let config = general_config .proof_compressor_config diff --git a/prover/crates/bin/prover_fri/Cargo.toml b/prover/crates/bin/prover_fri/Cargo.toml index 4f343e8c4e91..0d2e92be0481 100644 --- a/prover/crates/bin/prover_fri/Cargo.toml +++ b/prover/crates/bin/prover_fri/Cargo.toml @@ -13,7 +13,7 @@ categories.workspace = true vise.workspace = true zksync_types.workspace = true zksync_prover_dal.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_vlog.workspace = true zksync_object_store.workspace = true diff --git a/prover/crates/bin/prover_fri/src/main.rs b/prover/crates/bin/prover_fri/src/main.rs index e4b2fd5a6709..db813394c194 100644 --- a/prover/crates/bin/prover_fri/src/main.rs +++ b/prover/crates/bin/prover_fri/src/main.rs @@ -63,36 +63,7 @@ async fn main() -> anyhow::Result<()> { let observability_config = general_config .observability .context("observability config")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .expect("Invalid Sentry URL") - 
.with_sentry_environment(observability_config.sentry_environment); - } - - if let Some(opentelemetry) = observability_config.opentelemetry { - builder = builder - .with_opentelemetry( - &opentelemetry.level, - opentelemetry.endpoint, - "zksync-prover-fri".into(), - ) - .expect("Invalid OpenTelemetry config"); - } - let _guard = builder.build(); - - // Report whether sentry is running after the logging subsystem was initialized. - if let Some(sentry_url) = observability_config.sentry_url { - tracing::info!("Sentry configured with URL: {sentry_url}",); - } else { - tracing::info!("No sentry URL was provided"); - } + let _observability_guard = observability_config.install()?; let prover_config = general_config.prover_config.context("fri_prover config")?; let exporter_config = PrometheusExporterConfig::pull(prover_config.prometheus_port); diff --git a/prover/crates/bin/prover_fri_gateway/Cargo.toml b/prover/crates/bin/prover_fri_gateway/Cargo.toml index 6dd54d5d677d..8d116c4219d6 100644 --- a/prover/crates/bin/prover_fri_gateway/Cargo.toml +++ b/prover/crates/bin/prover_fri_gateway/Cargo.toml @@ -13,7 +13,7 @@ categories.workspace = true vise.workspace = true zksync_types.workspace = true zksync_prover_dal.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_core_leftovers.workspace = true zksync_object_store.workspace = true diff --git a/prover/crates/bin/prover_fri_gateway/src/main.rs b/prover/crates/bin/prover_fri_gateway/src/main.rs index c204fb7395f2..ed0574e7fba1 100644 --- a/prover/crates/bin/prover_fri_gateway/src/main.rs +++ b/prover/crates/bin/prover_fri_gateway/src/main.rs @@ -29,20 +29,7 @@ async fn main() -> anyhow::Result<()> { let observability_config = general_config .observability .context("observability config")?; - - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - - 
let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .expect("Invalid Sentry URL") - .with_sentry_environment(observability_config.sentry_environment); - } - let _guard = builder.build(); + let _observability_guard = observability_config.install()?; let config = general_config .prover_gateway diff --git a/prover/crates/bin/witness_generator/Cargo.toml b/prover/crates/bin/witness_generator/Cargo.toml index 64c6713540fa..fe73a02ba2af 100644 --- a/prover/crates/bin/witness_generator/Cargo.toml +++ b/prover/crates/bin/witness_generator/Cargo.toml @@ -12,7 +12,7 @@ categories.workspace = true [dependencies] vise.workspace = true zksync_prover_dal.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_prover_interface.workspace = true zksync_env_config.workspace = true zksync_system_constants.workspace = true diff --git a/prover/crates/bin/witness_generator/src/main.rs b/prover/crates/bin/witness_generator/src/main.rs index d337778aba6c..38b2e46ef74b 100644 --- a/prover/crates/bin/witness_generator/src/main.rs +++ b/prover/crates/bin/witness_generator/src/main.rs @@ -77,35 +77,7 @@ async fn main() -> anyhow::Result<()> { let observability_config = general_config .observability .context("observability config")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - .expect("Invalid Sentry URL") - .with_sentry_environment(observability_config.sentry_environment); - } - if let Some(opentelemetry) = observability_config.opentelemetry { - builder = builder - .with_opentelemetry( - 
&opentelemetry.level, - opentelemetry.endpoint, - "zksync-witness-generator".into(), - ) - .expect("Invalid OpenTelemetry config"); - } - let _guard = builder.build(); - - // Report whether sentry is running after the logging subsystem was initialized. - if let Some(sentry_url) = observability_config.sentry_url { - tracing::info!("Sentry configured with URL: {sentry_url}",); - } else { - tracing::info!("No sentry URL was provided"); - } + let _observability_guard = observability_config.install()?; let started_at = Instant::now(); let use_push_gateway = opt.batch_size.is_some(); diff --git a/prover/crates/bin/witness_vector_generator/Cargo.toml b/prover/crates/bin/witness_vector_generator/Cargo.toml index e8edecdf87b3..6a1d0af861c6 100644 --- a/prover/crates/bin/witness_vector_generator/Cargo.toml +++ b/prover/crates/bin/witness_vector_generator/Cargo.toml @@ -13,7 +13,7 @@ categories.workspace = true vise.workspace = true zksync_types.workspace = true zksync_prover_dal.workspace = true -zksync_config.workspace = true +zksync_config = { workspace = true, features = ["observability_ext"] } zksync_env_config.workspace = true zksync_object_store.workspace = true zksync_prover_fri_utils.workspace = true diff --git a/prover/crates/bin/witness_vector_generator/src/main.rs b/prover/crates/bin/witness_vector_generator/src/main.rs index 4451788ca9a4..1d3113ebf1aa 100644 --- a/prover/crates/bin/witness_vector_generator/src/main.rs +++ b/prover/crates/bin/witness_vector_generator/src/main.rs @@ -48,28 +48,7 @@ async fn main() -> anyhow::Result<()> { let observability_config = general_config .observability .context("observability config")?; - let log_format: zksync_vlog::LogFormat = observability_config - .log_format - .parse() - .context("Invalid log format")?; - - let mut builder = zksync_vlog::ObservabilityBuilder::new().with_log_format(log_format); - if let Some(sentry_url) = &observability_config.sentry_url { - builder = builder - .with_sentry_url(sentry_url) - 
.expect("Invalid Sentry URL") - .with_sentry_environment(observability_config.sentry_environment); - } - if let Some(opentelemetry) = observability_config.opentelemetry { - builder = builder - .with_opentelemetry( - &opentelemetry.level, - opentelemetry.endpoint, - "zksync-witness-vector-generator".into(), - ) - .expect("Invalid OpenTelemetry config"); - } - let _guard = builder.build(); + let _observability_guard = observability_config.install()?; let config = general_config .witness_vector_generator diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index 9d738fdf7231..375e35b060c2 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -151,6 +151,28 @@ dependencies = [ "term", ] +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + [[package]] name = "async-trait" version = "0.1.80" @@ -207,18 +229,17 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "axum" -version = "0.6.20" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", "axum-core", - "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.29", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", "itoa", "matchit", "memchr", @@ -227,7 +248,7 @@ dependencies = [ "pin-project-lite", 
"rustversion", "serde", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.1", "tower", "tower-layer", "tower-service", @@ -235,17 +256,20 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.3.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", "mime", + "pin-project-lite", "rustversion", + "sync_wrapper 0.1.2", "tower-layer", "tower-service", ] @@ -765,15 +789,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" -dependencies = [ - "crossbeam-utils", -] - [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -2121,6 +2136,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "httparse", + "httpdate", "itoa", "pin-project-lite", "smallvec", @@ -2161,14 +2177,15 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" dependencies = [ - "hyper 0.14.29", + "hyper 1.4.0", + "hyper-util", "pin-project-lite", "tokio", - "tokio-io-timeout", + "tower-service", ] [[package]] @@ -3006,43 +3023,46 @@ dependencies = [ [[package]] name = "opentelemetry" -version = "0.20.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9591d937bc0e6d2feb6f71a559540ab300ea49955229c347a517a28d27784c54" +checksum = 
"4c365a63eec4f55b7efeceb724f1336f26a9cf3427b70e59e2cd2a5b947fba96" dependencies = [ - "opentelemetry_api", - "opentelemetry_sdk", + "futures-core", + "futures-sink", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", ] [[package]] name = "opentelemetry-http" -version = "0.9.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" +checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab" dependencies = [ "async-trait", "bytes", - "http 0.2.12", - "opentelemetry_api", - "reqwest 0.11.27", + "http 1.1.0", + "opentelemetry", + "reqwest 0.12.5", ] [[package]] name = "opentelemetry-otlp" -version = "0.13.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" +checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727" dependencies = [ "async-trait", "futures-core", - "http 0.2.12", + "http 1.1.0", + "opentelemetry", "opentelemetry-http", "opentelemetry-proto", - "opentelemetry-semantic-conventions", - "opentelemetry_api", "opentelemetry_sdk", - "prost 0.11.9", - "reqwest 0.11.27", + "prost 0.13.1", + "reqwest 0.12.5", "thiserror", "tokio", "tonic", @@ -3050,58 +3070,37 @@ dependencies = [ [[package]] name = "opentelemetry-proto" -version = "0.3.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1e3f814aa9f8c905d0ee4bde026afd3b2577a97c10e1699912e3e44f0c4cbeb" +checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9" dependencies = [ - "opentelemetry_api", + "opentelemetry", "opentelemetry_sdk", - "prost 0.11.9", + "prost 0.13.1", "tonic", ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"73c9f9340ad135068800e7f1b24e9e09ed9e7143f5bf8518ded3d3ec69789269" -dependencies = [ - "opentelemetry", -] - -[[package]] -name = "opentelemetry_api" -version = "0.20.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a81f725323db1b1206ca3da8bb19874bbd3f57c3bcd59471bfb04525b265b9b" -dependencies = [ - "futures-channel", - "futures-util", - "indexmap 1.9.3", - "js-sys", - "once_cell", - "pin-project-lite", - "thiserror", - "urlencoding", -] +checksum = "1cefe0543875379e47eb5f1e68ff83f45cc41366a92dfd0d073d513bf68e9a05" [[package]] name = "opentelemetry_sdk" -version = "0.20.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8e705a0612d48139799fcbaba0d4a90f06277153e43dd2bdc16c6f0edd8026" +checksum = "692eac490ec80f24a17828d49b40b60f5aeaccdfe6a503f939713afd22bc28df" dependencies = [ "async-trait", - "crossbeam-channel", "futures-channel", "futures-executor", "futures-util", + "glob", "once_cell", - "opentelemetry_api", - "ordered-float 3.9.2", + "opentelemetry", "percent-encoding", "rand", - "regex", "serde_json", "thiserror", "tokio", @@ -3123,15 +3122,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "ordered-float" -version = "3.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" -dependencies = [ - "num-traits", -] - [[package]] name = "os_info" version = "3.8.2" @@ -3524,22 +3514,22 @@ dependencies = [ [[package]] name = "prost" -version = "0.11.9" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", - "prost-derive 0.11.9", + "prost-derive 0.12.6", ] [[package]] name = "prost" -version = "0.12.6" +version = "0.13.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc" dependencies = [ "bytes", - "prost-derive 0.12.6", + "prost-derive 0.13.1", ] [[package]] @@ -3565,22 +3555,22 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.11.9" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.12.1", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.68", ] [[package]] name = "prost-derive" -version = "0.12.6" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" dependencies = [ "anyhow", "itertools 0.12.1", @@ -4405,7 +4395,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ - "ordered-float 2.10.1", + "ordered-float", "serde", ] @@ -5217,16 +5207,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", -] - [[package]] name = "tokio-macros" version = "2.3.0" @@ -5366,24 +5346,26 @@ dependencies = [ [[package]] name = "tonic" -version = "0.9.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" 
+checksum = "38659f4a91aba8598d27821589f5db7dddd94601e7a01b1e485a50e5484c7401" dependencies = [ + "async-stream", "async-trait", "axum", - "base64 0.21.7", + "base64 0.22.1", "bytes", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.29", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.4.0", "hyper-timeout", + "hyper-util", "percent-encoding", "pin-project", - "prost 0.11.9", + "prost 0.13.1", + "socket2", "tokio", "tokio-stream", "tower", @@ -5467,17 +5449,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -5491,18 +5462,20 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.21.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75327c6b667828ddc28f5e3f169036cb793c3f588d83bf0f262a7f062ffed3c8" +checksum = "a9784ed4da7d921bc8df6963f8c80a0e4ce34ba6ba76668acadd3edbd985ff3b" dependencies = [ + "js-sys", "once_cell", "opentelemetry", "opentelemetry_sdk", "smallvec", "tracing", "tracing-core", - "tracing-log 0.1.4", + "tracing-log", "tracing-subscriber", + "web-time", ] [[package]] @@ -5533,7 +5506,7 @@ dependencies = [ "time", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", "tracing-serde", ] @@ -5909,6 +5882,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-roots" version = "0.25.4" @@ -6572,13 +6555,17 @@ dependencies = [ 
"opentelemetry", "opentelemetry-otlp", "opentelemetry-semantic-conventions", + "opentelemetry_sdk", "sentry", "serde", "serde_json", + "thiserror", + "time", "tokio", "tracing", "tracing-opentelemetry", "tracing-subscriber", + "url", "vise", "vise-exporter", ] From 1e885be15eaad2767436da2d97ca020114efbc65 Mon Sep 17 00:00:00 2001 From: Grzegorz Prusak Date: Fri, 26 Jul 2024 10:07:49 +0200 Subject: [PATCH 41/52] chore: removed fallback for loading consensus config (#2485) Followup to https://github.com/matter-labs/gitops-kubernetes/pull/5866 . --- core/bin/zksync_server/src/main.rs | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/core/bin/zksync_server/src/main.rs b/core/bin/zksync_server/src/main.rs index 93cabfdfe6d6..d9096f6f67df 100644 --- a/core/bin/zksync_server/src/main.rs +++ b/core/bin/zksync_server/src/main.rs @@ -100,16 +100,8 @@ fn main() -> anyhow::Result<()> { Some(path) => { let yaml = std::fs::read_to_string(&path).with_context(|| path.display().to_string())?; - let mut configs = - decode_yaml_repr::(&yaml) - .context("failed decoding general YAML config")?; - // Fallback to the consensus_config.yaml file. - // TODO: remove once we move the consensus config to general config on stage - if configs.consensus_config.is_none() { - configs.consensus_config = - config::read_consensus_config().context("read_consensus_config()")?; - } - configs + decode_yaml_repr::(&yaml) + .context("failed decoding general YAML config")? } }; From fb55d1c2e15a9683701c116440c6bf0c19a94f0b Mon Sep 17 00:00:00 2001 From: Grzegorz Prusak Date: Fri, 26 Jul 2024 10:20:14 +0200 Subject: [PATCH 42/52] chore: Changed the instruction in documentation to use docker commands (#2503) This way building the binary locally is no longer necessary. 
--- docs/guides/external-node/09_decentralization.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/guides/external-node/09_decentralization.md b/docs/guides/external-node/09_decentralization.md index fa780ba9ff55..67e83e0cde04 100644 --- a/docs/guides/external-node/09_decentralization.md +++ b/docs/guides/external-node/09_decentralization.md @@ -28,7 +28,7 @@ Each participant node of the gossipnet has to have an identity (a public/secret the first time, generate the secrets by running: ``` -cargo run -p zksync_external_node -- generate-secrets > consensus_secrets.yaml +docker run --entrypoint /usr/bin/zksync_external_node "matterlabs/external-node:2.0-v24.12.0" generate-secrets > consensus_secrets.yaml chmod 600 consensus_secrets.yaml ``` @@ -81,11 +81,11 @@ EN_CONSENSUS_SECRETS_PATH=... These variables should point to your consensus config and secrets files that we have just created. Tweak the paths to the files if you have placed them differently. -### Add `--enable-consensus` flag to your entry point +### Add `--enable-consensus` flag to your entry point command For the consensus configuration to take effect you have to add `--enable-consensus` flag to the command line when -running the node: +running the node, for example: ``` -cargo run -p zksync_external_node -- --enable-consensus +docker run "matterlabs/external-node:2.0-v24.12.0" --enable-consensus ``` From c6b3adf3f29d3a89daa2cfffa1c0e5cb9770eb0d Mon Sep 17 00:00:00 2001 From: Grzegorz Prusak Date: Fri, 26 Jul 2024 10:47:47 +0200 Subject: [PATCH 43/52] feat: Added a JSON RPC to simulating L1 for consensus attestation (#2480) It just checks what is the last L1 batch certificate stored locally and returns its number (actually, the next number to avoid problems with 0). ENs will query the main node for this information to determine what is the number of the next batch that they should vote for. 
--- Cargo.lock | 1 + core/lib/dal/src/consensus_dal.rs | 74 +++++++++++++------ core/lib/types/src/api/en.rs | 7 ++ core/lib/web3_decl/src/namespaces/en.rs | 7 ++ core/node/api_server/Cargo.toml | 1 + .../web3/backend_jsonrpsee/namespaces/en.rs | 6 ++ .../node/api_server/src/web3/namespaces/en.rs | 26 ++++++- core/node/consensus/src/storage/connection.rs | 51 ++++++++----- core/node/consensus/src/storage/mod.rs | 9 +++ core/node/consensus/src/storage/store.rs | 22 ++---- core/node/consensus/src/tests.rs | 55 +++++++++++++- core/node/node_sync/src/client.rs | 8 ++ core/node/node_sync/src/testonly.rs | 4 + 13 files changed, 209 insertions(+), 62 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3cdfe491c993..5d2fa02b3d0f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8936,6 +8936,7 @@ dependencies = [ "tracing", "vise", "zksync_config", + "zksync_consensus_roles", "zksync_contracts", "zksync_dal", "zksync_health_check", diff --git a/core/lib/dal/src/consensus_dal.rs b/core/lib/dal/src/consensus_dal.rs index 7655abbe230c..87285266d58e 100644 --- a/core/lib/dal/src/consensus_dal.rs +++ b/core/lib/dal/src/consensus_dal.rs @@ -1,5 +1,5 @@ use anyhow::Context as _; -use bigdecimal::Zero; +use bigdecimal::Zero as _; use zksync_consensus_roles::{attester, validator}; use zksync_consensus_storage::{BlockStoreState, ReplicaState}; use zksync_db_connection::{ @@ -378,9 +378,7 @@ impl ConsensusDal<'_, '_> { ) -> Result<(), InsertCertificateError> { use InsertCertificateError as E; let header = &cert.message.proposal; - let mut txn = self.storage.start_transaction().await?; - let want_payload = txn - .consensus_dal() + let want_payload = self .block_payload(cert.message.proposal.number) .await? 
.ok_or(E::MissingPayload)?; @@ -394,28 +392,24 @@ impl ConsensusDal<'_, '_> { VALUES ($1, $2) "#, - header.number.0 as i64, + i64::try_from(header.number.0).context("overflow")?, zksync_protobuf::serde::serialize(cert, serde_json::value::Serializer).unwrap(), ) .instrument("insert_block_certificate") .report_latency() - .execute(&mut txn) + .execute(self.storage) .await?; - txn.commit().await.context("commit")?; Ok(()) } /// Inserts a certificate for the L1 batch. - /// - /// Insertion is allowed even if it creates gaps in the L1 batch history. - /// - /// This method assumes that all payload validation has been carried out by the caller. + /// Noop if a certificate for the same L1 batch is already present. + /// No verification is performed - it cannot be performed due to circular dependency on + /// `zksync_l1_contract_interface`. pub async fn insert_batch_certificate( &mut self, cert: &attester::BatchQC, - ) -> Result<(), InsertCertificateError> { - let l1_batch_number = cert.message.number.0 as i64; - + ) -> anyhow::Result<()> { let res = sqlx::query!( r#" INSERT INTO @@ -424,18 +418,17 @@ impl ConsensusDal<'_, '_> { ($1, $2, NOW(), NOW()) ON CONFLICT (l1_batch_number) DO NOTHING "#, - l1_batch_number, + i64::try_from(cert.message.number.0).context("overflow")?, + // Unwrap is ok, because serialization should always succeed. zksync_protobuf::serde::serialize(cert, serde_json::value::Serializer).unwrap(), ) .instrument("insert_batch_certificate") .report_latency() .execute(self.storage) .await?; - if res.rows_affected().is_zero() { - tracing::debug!(l1_batch_number, "duplicate batch certificate"); + tracing::debug!(l1_batch_number = ?cert.message.number, "duplicate batch certificate"); } - Ok(()) } @@ -443,7 +436,7 @@ impl ConsensusDal<'_, '_> { /// depending on the order in which votes have been collected over gossip by consensus. 
pub async fn get_last_batch_certificate_number( &mut self, - ) -> DalResult> { + ) -> anyhow::Result> { let row = sqlx::query!( r#" SELECT @@ -457,9 +450,46 @@ impl ConsensusDal<'_, '_> { .fetch_one(self.storage) .await?; - Ok(row - .number - .map(|number| attester::BatchNumber(number as u64))) + let Some(n) = row.number else { + return Ok(None); + }; + Ok(Some(attester::BatchNumber( + n.try_into().context("overflow")?, + ))) + } + + /// Next batch that the attesters should vote for. + /// This is a main node only query. + /// ENs should call the attestation_status RPC of the main node. + pub async fn next_batch_to_attest(&mut self) -> anyhow::Result { + // First batch that we don't have a certificate for. + if let Some(last) = self + .get_last_batch_certificate_number() + .await + .context("get_last_batch_certificate_number()")? + { + return Ok(last + 1); + } + // Otherwise start with the last sealed L1 batch. + // We don't want to backfill certificates for old batches. + // Note that there is a race condition in case the next + // batch is sealed before the certificate for the current + // last sealed batch is stored. This is only relevant + // for the first certificate though and anyway this is + // a test setup, so we are OK with that race condition. + if let Some(sealed) = self + .storage + .blocks_dal() + .get_sealed_l1_batch_number() + .await + .context("get_sealed_l1_batch_number()")? + { + return Ok(attester::BatchNumber(sealed.0.into())); + } + // Otherwise start with 0. + // Note that main node doesn't start from snapshot + // and doesn't have prunning enabled. 
+ Ok(attester::BatchNumber(0)) } } diff --git a/core/lib/types/src/api/en.rs b/core/lib/types/src/api/en.rs index 7232071bd44b..75de25ad80b2 100644 --- a/core/lib/types/src/api/en.rs +++ b/core/lib/types/src/api/en.rs @@ -46,3 +46,10 @@ pub struct SyncBlock { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ConsensusGenesis(pub serde_json::Value); + +/// AttestationStatus maintained by the main node. +/// Used for testing L1 batch signing by consensus attesters. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AttestationStatus { + pub next_batch_to_attest: L1BatchNumber, +} diff --git a/core/lib/web3_decl/src/namespaces/en.rs b/core/lib/web3_decl/src/namespaces/en.rs index 3bd55ecf936f..0a4c8acb4c60 100644 --- a/core/lib/web3_decl/src/namespaces/en.rs +++ b/core/lib/web3_decl/src/namespaces/en.rs @@ -35,6 +35,13 @@ pub trait EnNamespace { #[method(name = "genesisConfig")] async fn genesis_config(&self) -> RpcResult; + /// MAIN NODE ONLY: + /// Gets the AttestationStatus of L1 batches. + /// This is a temporary RPC used for testing L1 batch signing + /// by consensus attesters. + #[method(name = "attestationStatus")] + async fn attestation_status(&self) -> RpcResult; + /// Get tokens that are white-listed and it can be used by paymasters. 
#[method(name = "whitelistedTokensForAA")] async fn whitelisted_tokens_for_aa(&self) -> RpcResult>; diff --git a/core/node/api_server/Cargo.toml b/core/node/api_server/Cargo.toml index 2a09ce5d176c..f7d40210b485 100644 --- a/core/node/api_server/Cargo.toml +++ b/core/node/api_server/Cargo.toml @@ -12,6 +12,7 @@ categories.workspace = true [dependencies] zksync_config.workspace = true +zksync_consensus_roles.workspace = true zksync_contracts.workspace = true zksync_types.workspace = true zksync_dal.workspace = true diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/en.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/en.rs index ef5c6ee40dd9..625d774465e5 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/en.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/en.rs @@ -25,6 +25,12 @@ impl EnNamespaceServer for EnNamespace { .map_err(|err| self.current_method().map_err(err)) } + async fn attestation_status(&self) -> RpcResult { + self.attestation_status_impl() + .await + .map_err(|err| self.current_method().map_err(err)) + } + async fn sync_tokens(&self, block_number: Option) -> RpcResult> { self.sync_tokens_impl(block_number) .await diff --git a/core/node/api_server/src/web3/namespaces/en.rs b/core/node/api_server/src/web3/namespaces/en.rs index 2661d9d81bf7..5f635b527b9d 100644 --- a/core/node/api_server/src/web3/namespaces/en.rs +++ b/core/node/api_server/src/web3/namespaces/en.rs @@ -1,5 +1,6 @@ use anyhow::Context as _; use zksync_config::{configs::EcosystemContracts, GenesisConfig}; +use zksync_consensus_roles::attester; use zksync_dal::{CoreDal, DalError}; use zksync_types::{ api::en, protocol_version::ProtocolSemanticVersion, tokens::TokenInfo, Address, L1BatchNumber, @@ -16,14 +17,20 @@ pub(crate) struct EnNamespace { state: RpcState, } +fn to_l1_batch_number(n: attester::BatchNumber) -> anyhow::Result { + Ok(L1BatchNumber( + n.0.try_into().context("L1BatchNumber overflow")?, + )) +} + 
impl EnNamespace { pub fn new(state: RpcState) -> Self { Self { state } } pub async fn consensus_genesis_impl(&self) -> Result, Web3Error> { - let mut storage = self.state.acquire_connection().await?; - let Some(genesis) = storage + let mut conn = self.state.acquire_connection().await?; + let Some(genesis) = conn .consensus_dal() .genesis() .await @@ -36,6 +43,21 @@ impl EnNamespace { ))) } + #[tracing::instrument(skip(self))] + pub async fn attestation_status_impl(&self) -> Result { + Ok(en::AttestationStatus { + next_batch_to_attest: to_l1_batch_number( + self.state + .acquire_connection() + .await? + .consensus_dal() + .next_batch_to_attest() + .await + .context("next_batch_to_attest()")?, + )?, + }) + } + pub(crate) fn current_method(&self) -> &MethodTracer { &self.state.current_method } diff --git a/core/node/consensus/src/storage/connection.rs b/core/node/consensus/src/storage/connection.rs index 7bff2c4bcf0e..6bae0a490508 100644 --- a/core/node/consensus/src/storage/connection.rs +++ b/core/node/consensus/src/storage/connection.rs @@ -1,8 +1,9 @@ use anyhow::Context as _; use zksync_concurrency::{ctx, error::Wrap as _, time}; +use zksync_consensus_crypto::keccak256::Keccak256; use zksync_consensus_roles::{attester, validator}; use zksync_consensus_storage::{self as storage, BatchStoreState}; -use zksync_dal::{consensus_dal::Payload, Core, CoreDal, DalError}; +use zksync_dal::{consensus_dal, consensus_dal::Payload, Core, CoreDal, DalError}; use zksync_l1_contract_interface::i_executor::structures::StoredBatchInfo; use zksync_node_sync::{fetcher::IoCursorExt as _, ActionQueueSender, SyncState}; use zksync_state_keeper::io::common::IoCursor; @@ -115,35 +116,26 @@ impl<'a> Connection<'a> { .await??) } - /// Wrapper for `consensus_dal().insert_batch_certificate()`. + /// Wrapper for `consensus_dal().insert_batch_certificate()`, + /// which additionally verifies that the batch hash matches the stored batch. 
pub async fn insert_batch_certificate( &mut self, ctx: &ctx::Ctx, cert: &attester::BatchQC, ) -> Result<(), InsertCertificateError> { - use crate::storage::consensus_dal::InsertCertificateError as E; - - let l1_batch_number = L1BatchNumber(cert.message.number.0 as u32); - - let Some(l1_batch) = self - .0 - .blocks_dal() - .get_l1_batch_metadata(l1_batch_number) + use consensus_dal::InsertCertificateError as E; + let want_hash = self + .batch_hash(ctx, cert.message.number) .await - .map_err(E::Dal)? - else { - return Err(E::MissingPayload.into()); - }; - - let l1_batch_info = StoredBatchInfo::from(&l1_batch); - - if l1_batch_info.hash().0 != *cert.message.hash.0.as_bytes() { + .wrap("batch_hash()")? + .ok_or(E::MissingPayload)?; + if want_hash != cert.message.hash { return Err(E::PayloadMismatch.into()); } - Ok(ctx .wait(self.0.consensus_dal().insert_batch_certificate(cert)) - .await??) + .await? + .map_err(E::Other)?) } /// Wrapper for `consensus_dal().replica_state()`. @@ -166,6 +158,25 @@ impl<'a> Connection<'a> { .context("sqlx")?) } + /// Wrapper for `consensus_dal().batch_hash()`. + pub async fn batch_hash( + &mut self, + ctx: &ctx::Ctx, + number: attester::BatchNumber, + ) -> ctx::Result> { + let n = L1BatchNumber(number.0.try_into().context("overflow")?); + let Some(meta) = ctx + .wait(self.0.blocks_dal().get_l1_batch_metadata(n)) + .await? + .context("get_l1_batch_metadata()")? + else { + return Ok(None); + }; + Ok(Some(attester::BatchHash(Keccak256::from_bytes( + StoredBatchInfo::from(&meta).hash().0, + )))) + } + /// Wrapper for `blocks_dal().get_l1_batch_metadata()`. 
pub async fn batch( &mut self, diff --git a/core/node/consensus/src/storage/mod.rs b/core/node/consensus/src/storage/mod.rs index 6660f75332bc..424002603c7c 100644 --- a/core/node/consensus/src/storage/mod.rs +++ b/core/node/consensus/src/storage/mod.rs @@ -27,6 +27,15 @@ pub enum InsertCertificateError { Inner(#[from] consensus_dal::InsertCertificateError), } +impl From for InsertCertificateError { + fn from(err: ctx::Error) -> Self { + match err { + ctx::Error::Canceled(err) => Self::Canceled(err), + ctx::Error::Internal(err) => Self::Inner(err.into()), + } + } +} + #[derive(Debug)] pub(crate) struct PayloadQueue { inner: IoCursor, diff --git a/core/node/consensus/src/storage/store.rs b/core/node/consensus/src/storage/store.rs index b1dc3e0b60c2..ce73c946a029 100644 --- a/core/node/consensus/src/storage/store.rs +++ b/core/node/consensus/src/storage/store.rs @@ -3,13 +3,11 @@ use std::sync::Arc; use anyhow::Context as _; use zksync_concurrency::{ctx, error::Wrap as _, scope, sync, time}; use zksync_consensus_bft::PayloadManager; -use zksync_consensus_crypto::keccak256::Keccak256; use zksync_consensus_roles::{attester, validator}; use zksync_consensus_storage::{self as storage, BatchStoreState}; use zksync_dal::consensus_dal::{self, Payload}; -use zksync_l1_contract_interface::i_executor::structures::StoredBatchInfo; use zksync_node_sync::fetcher::{FetchedBlock, FetchedTransaction}; -use zksync_types::{L1BatchNumber, L2BlockNumber}; +use zksync_types::L2BlockNumber; use super::{Connection, PayloadQueue}; use crate::storage::{ConnectionPool, InsertCertificateError}; @@ -526,26 +524,16 @@ impl storage::PersistentBatchStore for Store { ctx: &ctx::Ctx, number: attester::BatchNumber, ) -> ctx::Result> { - let Some(batch) = self + let Some(hash) = self .conn(ctx) .await? - .batch( - ctx, - L1BatchNumber(u32::try_from(number.0).context("number")?), - ) + .batch_hash(ctx, number) .await - .wrap("batch")? + .wrap("batch_hash()")? 
else { return Ok(None); }; - - let info = StoredBatchInfo::from(&batch); - let hash = Keccak256::from_bytes(info.hash().0); - - Ok(Some(attester::Batch { - number, - hash: attester::BatchHash(hash), - })) + Ok(Some(attester::Batch { number, hash })) } /// Returns the QC of the batch with the given number. diff --git a/core/node/consensus/src/tests.rs b/core/node/consensus/src/tests.rs index 7d269376b65c..9890165ad81f 100644 --- a/core/node/consensus/src/tests.rs +++ b/core/node/consensus/src/tests.rs @@ -6,10 +6,11 @@ use zksync_config::configs::consensus::{ValidatorPublicKey, WeightedValidator}; use zksync_consensus_crypto::TextFmt as _; use zksync_consensus_network::testonly::{new_configs, new_fullnode}; use zksync_consensus_roles::{ - validator, + attester, validator, validator::testonly::{Setup, SetupSpec}, }; use zksync_consensus_storage::BlockStore; +use zksync_node_sync::MainNodeClient; use zksync_types::{L1BatchNumber, ProtocolVersionId}; use crate::{ @@ -669,6 +670,58 @@ async fn test_centralized_fetcher(from_snapshot: bool, version: ProtocolVersionI .unwrap(); } +#[test_casing(2, VERSIONS)] +#[tokio::test] +async fn test_attestation_status_api(version: ProtocolVersionId) { + zksync_concurrency::testonly::abort_on_panic(); + let ctx = &ctx::test_root(&ctx::RealClock); + scope::run!(ctx, |ctx, s| async { + let validator_pool = ConnectionPool::test(false, version).await; + let (mut validator, runner) = + testonly::StateKeeper::new(ctx, validator_pool.clone()).await?; + s.spawn_bg(runner.run(ctx).instrument(tracing::info_span!("validator"))); + + // API server needs at least 1 L1 batch to start. + validator.seal_batch().await; + let api = validator.connect(ctx).await?; + + // If the main node has no L1 batch certificates, + // the first one to sign should be `last_sealed_batch`. 
+ validator_pool + .wait_for_batch(ctx, validator.last_sealed_batch()) + .await?; + let status = api.fetch_attestation_status().await?; + assert_eq!(status.next_batch_to_attest, validator.last_sealed_batch()); + + // Insert a cert, then check again. + validator_pool + .wait_for_batch(ctx, status.next_batch_to_attest) + .await?; + { + let mut conn = validator_pool.connection(ctx).await?; + let number = attester::BatchNumber(status.next_batch_to_attest.0.into()); + let hash = conn.batch_hash(ctx, number).await?.unwrap(); + let cert = attester::BatchQC { + signatures: attester::MultiSig::default(), + message: attester::Batch { number, hash }, + }; + conn.insert_batch_certificate(ctx, &cert) + .await + .context("insert_batch_certificate()")?; + } + let want = status.next_batch_to_attest + 1; + let got = api + .fetch_attestation_status() + .await + .context("fetch_attestation_status()")?; + assert_eq!(want, got.next_batch_to_attest); + + Ok(()) + }) + .await + .unwrap(); +} + /// Tests that generated L1 batch witnesses can be verified successfully. /// TODO: add tests for verification failures. #[test_casing(2, VERSIONS)] diff --git a/core/node/node_sync/src/client.rs b/core/node/node_sync/src/client.rs index 3d71d86f163b..c4aaa383bb0c 100644 --- a/core/node/node_sync/src/client.rs +++ b/core/node/node_sync/src/client.rs @@ -45,6 +45,8 @@ pub trait MainNodeClient: 'static + Send + Sync + fmt::Debug { async fn fetch_consensus_genesis(&self) -> EnrichedClientResult>; async fn fetch_genesis_config(&self) -> EnrichedClientResult; + + async fn fetch_attestation_status(&self) -> EnrichedClientResult; } #[async_trait] @@ -136,6 +138,12 @@ impl MainNodeClient for Box> { .rpc_context("consensus_genesis") .await } + + async fn fetch_attestation_status(&self) -> EnrichedClientResult { + self.attestation_status() + .rpc_context("attestation_status") + .await + } } /// Main node health check. 
diff --git a/core/node/node_sync/src/testonly.rs b/core/node/node_sync/src/testonly.rs index 502da17ffd98..677f548c6281 100644 --- a/core/node/node_sync/src/testonly.rs +++ b/core/node/node_sync/src/testonly.rs @@ -77,6 +77,10 @@ impl MainNodeClient for MockMainNodeClient { unimplemented!() } + async fn fetch_attestation_status(&self) -> EnrichedClientResult { + unimplemented!() + } + async fn fetch_genesis_config(&self) -> EnrichedClientResult { Ok(mock_genesis_config()) } From 97cc2e28fb39a1444fd728e17be2310c63158502 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 13:04:00 +0400 Subject: [PATCH 44/52] docs(prover): Improve default configuration for prover binaries (#2505) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Changes the default prometheus ports for prover binaries so that there is no need to override them when running together. Updates the docs accordingly. Also mentions that you can run WVG with multiple threads. ## Why ❔ Convenience. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- etc/env/base/fri_witness_generator.toml | 1 + etc/env/base/fri_witness_vector_generator.toml | 2 +- prover/docs/03_launch.md | 8 ++++---- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/etc/env/base/fri_witness_generator.toml b/etc/env/base/fri_witness_generator.toml index 76f67dca8425..96ff73868262 100644 --- a/etc/env/base/fri_witness_generator.toml +++ b/etc/env/base/fri_witness_generator.toml @@ -7,3 +7,4 @@ recursion_tip_generation_timeout_in_secs = 900 scheduler_generation_timeout_in_secs = 900 max_attempts = 10 shall_save_to_public_bucket = true +prometheus_listener_port = 3116 diff --git a/etc/env/base/fri_witness_vector_generator.toml b/etc/env/base/fri_witness_vector_generator.toml index 1e8837965bb1..ad925100cef5 100644 --- a/etc/env/base/fri_witness_vector_generator.toml +++ b/etc/env/base/fri_witness_vector_generator.toml @@ -1,7 +1,7 @@ [fri_witness_vector_generator] prover_instance_wait_timeout_in_secs=200 prover_instance_poll_time_in_milli_secs=250 -prometheus_listener_port=3314 +prometheus_listener_port=3420 prometheus_pushgateway_url="http://127.0.0.1:9091" prometheus_push_interval_ms=100 specialized_group_id=100 diff --git a/prover/docs/03_launch.md b/prover/docs/03_launch.md index 2c5809e994e5..395a35e90227 100644 --- a/prover/docs/03_launch.md +++ b/prover/docs/03_launch.md @@ -62,7 +62,7 @@ until it happens, witness generators will panic and won't be able to start. Once a job is created, start witness generators: ``` -API_PROMETHEUS_LISTENER_PORT=3116 zk f cargo run --release --bin zksync_witness_generator -- --all_rounds +zk f cargo run --release --bin zksync_witness_generator -- --all_rounds ``` `--all_rounds` means that witness generator will produce witnesses of all kinds. You can run a witness generator for @@ -71,11 +71,11 @@ each round separately, but it's mostly useful in production environments. 
### Witness vector generator ``` -FRI_WITNESS_VECTOR_GENERATOR_PROMETHEUS_LISTENER_PORT=3420 zk f cargo run --release --bin zksync_witness_vector_generator +zk f cargo run --release --bin zksync_witness_vector_generator -- --threads 10 ``` -WVG prepares inputs for prover, and it's a single-threaded time-consuming operation. You may run several instances (make -sure to use different ports). The exact amount of WVGs needed to "feed" one prover depends on CPU/GPU specs, but a +WVG prepares inputs for prover, and it's a single-threaded time-consuming operation. You may run several jobs by +changing the `threads` parameter. The exact amount of WVGs needed to "feed" one prover depends on CPU/GPU specs, but a ballpark estimate (useful for local development) is 10 WVGs per prover. ### Prover From 4222d135b62eb4de103c4aebb35e9c302d94ad63 Mon Sep 17 00:00:00 2001 From: Danil Date: Fri, 26 Jul 2024 11:24:24 +0200 Subject: [PATCH 45/52] feat(zk_toolbox): use configs from the main repo (#2470) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Use general, genesis, and secrets configs from the main repository and not the copypastes. ## Why ❔ It allows us easily maintain the upcoming changes ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--------- Signed-off-by: Danil --- Cargo.lock | 12 +- Cargo.toml | 2 +- core/lib/basic_types/src/commitment.rs | 3 +- core/lib/config/src/configs/general.rs | 2 +- core/lib/config/src/configs/secrets.rs | 8 +- core/lib/protobuf_config/src/lib.rs | 15 +- core/lib/protobuf_config/src/secrets.rs | 5 +- prover/Cargo.lock | 12 +- prover/Cargo.toml | 2 +- zk_toolbox/Cargo.lock | 48 ++-- zk_toolbox/Cargo.toml | 9 +- zk_toolbox/crates/common/Cargo.toml | 4 +- zk_toolbox/crates/common/src/cmd.rs | 18 +- zk_toolbox/crates/common/src/db.rs | 2 +- zk_toolbox/crates/common/src/ethereum.rs | 4 +- zk_toolbox/crates/common/src/forge.rs | 18 +- zk_toolbox/crates/config/Cargo.toml | 6 +- zk_toolbox/crates/config/src/chain.rs | 54 ++--- zk_toolbox/crates/config/src/consts.rs | 4 +- zk_toolbox/crates/config/src/contracts.rs | 6 +- zk_toolbox/crates/config/src/ecosystem.rs | 17 +- zk_toolbox/crates/config/src/external_node.rs | 37 ++-- .../forge_interface/accept_ownership/mod.rs | 4 +- .../forge_interface/deploy_ecosystem/input.rs | 31 +-- .../deploy_ecosystem/output.rs | 6 +- .../initialize_bridges/input.rs | 12 +- .../initialize_bridges/output.rs | 4 +- .../src/forge_interface/paymaster/mod.rs | 11 +- .../forge_interface/register_chain/input.rs | 12 +- .../forge_interface/register_chain/output.rs | 4 +- zk_toolbox/crates/config/src/general.rs | 205 ++++++------------ zk_toolbox/crates/config/src/genesis.rs | 54 ++--- zk_toolbox/crates/config/src/lib.rs | 1 + zk_toolbox/crates/config/src/secrets.rs | 76 ++++--- zk_toolbox/crates/config/src/traits.rs | 61 ++++-- zk_toolbox/crates/config/src/wallets.rs | 6 +- zk_toolbox/crates/types/Cargo.toml | 2 +- zk_toolbox/crates/types/src/chain_id.rs | 18 -- .../l1_batch_commit_data_generator_mode.rs | 22 -- zk_toolbox/crates/types/src/l1_network.rs | 6 +- zk_toolbox/crates/types/src/lib.rs | 9 +- .../crates/types/src/protocol_version.rs | 87 -------- zk_toolbox/crates/types/src/prover_mode.rs | 14 +- .../crates/types/src/wallet_creation.rs | 4 +- 
zk_toolbox/crates/zk_inception/Cargo.toml | 2 +- .../src/commands/chain/args/create.rs | 49 +++-- .../zk_inception/src/commands/chain/create.rs | 4 +- .../src/commands/chain/deploy_paymaster.rs | 13 +- .../src/commands/chain/genesis.rs | 15 +- .../zk_inception/src/commands/chain/init.rs | 7 +- .../src/commands/chain/initialize_bridges.rs | 14 +- .../src/commands/ecosystem/args/create.rs | 7 +- .../src/commands/external_node/init.rs | 14 +- .../commands/external_node/prepare_configs.rs | 49 +++-- .../src/commands/prover/args/init.rs | 5 +- .../src/commands/prover/args/run.rs | 6 +- .../zk_inception/src/commands/prover/init.rs | 4 +- .../crates/zk_inception/src/messages.rs | 2 + zk_toolbox/crates/zk_supervisor/Cargo.toml | 1 - .../commands/database/args/new_migration.rs | 3 +- .../src/commands/database/drop.rs | 2 +- zk_toolbox/crates/zk_supervisor/src/dals.rs | 16 +- .../crates/zk_supervisor/src/messages.rs | 5 +- 63 files changed, 531 insertions(+), 624 deletions(-) delete mode 100644 zk_toolbox/crates/types/src/chain_id.rs delete mode 100644 zk_toolbox/crates/types/src/l1_batch_commit_data_generator_mode.rs delete mode 100644 zk_toolbox/crates/types/src/protocol_version.rs diff --git a/Cargo.lock b/Cargo.lock index 5d2fa02b3d0f..0503253b526c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6339,24 +6339,24 @@ dependencies = [ [[package]] name = "strum" -version = "0.24.1" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.3" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck 0.4.1", 
+ "heck 0.5.0", "proc-macro2 1.0.69", "quote 1.0.33", "rustversion", - "syn 1.0.109", + "syn 2.0.38", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 49b2cc50c567..e731bf6533b0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -167,7 +167,7 @@ sha3 = "0.10.8" sqlx = "0.7.3" static_assertions = "1.1" structopt = "0.3.20" -strum = "0.24" +strum = "0.26" tempfile = "3.0.2" test-casing = "0.1.2" test-log = "0.2.15" diff --git a/core/lib/basic_types/src/commitment.rs b/core/lib/basic_types/src/commitment.rs index 873dff2313a2..eca339f40f42 100644 --- a/core/lib/basic_types/src/commitment.rs +++ b/core/lib/basic_types/src/commitment.rs @@ -1,4 +1,5 @@ use serde::{Deserialize, Serialize}; +use strum::{Display, EnumIter}; use crate::{ ethabi, @@ -6,7 +7,7 @@ use crate::{ U256, }; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize, EnumIter, Display)] pub enum L1BatchCommitmentMode { #[default] Rollup, diff --git a/core/lib/config/src/configs/general.rs b/core/lib/config/src/configs/general.rs index 122d1e278553..5707b5c70492 100644 --- a/core/lib/config/src/configs/general.rs +++ b/core/lib/config/src/configs/general.rs @@ -18,7 +18,7 @@ use crate::{ SnapshotsCreatorConfig, }; -#[derive(Debug, PartialEq)] +#[derive(Debug, Clone, PartialEq)] pub struct GeneralConfig { pub postgres_config: Option, pub api_config: Option, diff --git a/core/lib/config/src/configs/secrets.rs b/core/lib/config/src/configs/secrets.rs index 02bb52e00718..71197f5d9306 100644 --- a/core/lib/config/src/configs/secrets.rs +++ b/core/lib/config/src/configs/secrets.rs @@ -30,9 +30,11 @@ impl DatabaseSecrets { /// Returns a copy of the replica database URL as a `Result` to simplify error propagation. 
pub fn replica_url(&self) -> anyhow::Result { - self.server_replica_url - .clone() - .context("Replica DB URL is absent") + if let Some(replica_url) = &self.server_replica_url { + Ok(replica_url.clone()) + } else { + self.master_url() + } } /// Returns a copy of the prover database URL as a `Result` to simplify error propagation. diff --git a/core/lib/protobuf_config/src/lib.rs b/core/lib/protobuf_config/src/lib.rs index 839f3e3cf8ca..e910b5760d31 100644 --- a/core/lib/protobuf_config/src/lib.rs +++ b/core/lib/protobuf_config/src/lib.rs @@ -40,7 +40,10 @@ mod wallets; use std::{path::PathBuf, str::FromStr}; use anyhow::Context; -use zksync_protobuf::{serde::serialize_proto, ProtoRepr}; +use zksync_protobuf::{ + build::{prost_reflect, prost_reflect::ReflectMessage, serde}, + ProtoRepr, +}; use zksync_types::{H160, H256}; fn parse_h256(bytes: &str) -> anyhow::Result { @@ -71,3 +74,13 @@ pub fn encode_yaml_repr(value: &T::Type) -> anyhow::Result serialize_proto(&T::build(value), &mut s)?; Ok(buffer) } + +fn serialize_proto( + x: &T, + s: S, +) -> Result { + let opts = prost_reflect::SerializeOptions::new() + .use_proto_field_name(true) + .stringify_64_bit_integers(false); + x.transcode_to_dynamic().serialize_with_options(s, &opts) +} diff --git a/core/lib/protobuf_config/src/secrets.rs b/core/lib/protobuf_config/src/secrets.rs index 43f537a5fbfa..3047c0093bb0 100644 --- a/core/lib/protobuf_config/src/secrets.rs +++ b/core/lib/protobuf_config/src/secrets.rs @@ -41,15 +41,12 @@ impl ProtoRepr for proto::DatabaseSecrets { .map(str::parse::) .transpose() .context("master_url")?; - let mut server_replica_url = self + let server_replica_url = self .server_replica_url .as_deref() .map(str::parse::) .transpose() .context("replica_url")?; - if server_replica_url.is_none() { - server_replica_url.clone_from(&server_url) - } let prover_url = self .prover_url .as_deref() diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 0afe435859d6..d3023bba6dc9 100644 --- 
a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -6043,24 +6043,24 @@ dependencies = [ [[package]] name = "strum" -version = "0.24.1" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.3" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck 0.4.1", + "heck 0.5.0", "proc-macro2 1.0.85", "quote 1.0.36", "rustversion", - "syn 1.0.109", + "syn 2.0.66", ] [[package]] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index c06c0774639a..824ce4c4451a 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -47,7 +47,7 @@ serde_json = "1.0" sha3 = "0.10.8" sqlx = { version = "0.7.3", default-features = false } structopt = "0.3.26" -strum = { version = "0.24" } +strum = { version = "0.26" } tempfile = "3" tokio = "1" toml_edit = "0.14.4" diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index 375e35b060c2..0c6b1b0a0cbb 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -667,7 +667,7 @@ dependencies = [ "serde_json", "serde_yaml", "sqlx", - "strum_macros 0.26.4", + "strum", "thiserror", "tokio", "toml", @@ -692,13 +692,14 @@ dependencies = [ "rand", "serde", "serde_json", - "strum 0.26.3", - "strum_macros 0.26.4", + "strum", "thiserror", "types", "url", "xshell", + "zksync_basic_types", "zksync_config", + "zksync_protobuf", "zksync_protobuf_config", ] @@ -1403,7 +1404,7 @@ dependencies = [ "rlp", "serde", "serde_json", - "strum 0.26.3", + "strum", "syn 2.0.68", "tempfile", "thiserror", @@ -4928,35 +4929,13 @@ version = "0.11.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "strum" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" -dependencies = [ - "strum_macros 0.24.3", -] - [[package]] name = "strum" version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ - "strum_macros 0.26.4", -] - -[[package]] -name = "strum_macros" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "rustversion", - "syn 1.0.109", + "strum_macros", ] [[package]] @@ -5549,9 +5528,9 @@ dependencies = [ "clap", "ethers", "serde", - "strum 0.26.3", - "strum_macros 0.26.4", + "strum", "thiserror", + "zksync_basic_types", ] [[package]] @@ -6280,14 +6259,14 @@ dependencies = [ "serde_json", "serde_yaml", "slugify-rs", - "strum 0.26.3", - "strum_macros 0.26.4", + "strum", "thiserror", "tokio", "toml", "types", "url", "xshell", + "zksync_basic_types", "zksync_config", ] @@ -6301,8 +6280,7 @@ dependencies = [ "config", "human-panic", "serde", - "strum 0.26.3", - "strum_macros 0.26.4", + "strum", "tokio", "url", "xshell", @@ -6335,7 +6313,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "strum 0.24.1", + "strum", "thiserror", "tiny-keccak", "url", @@ -6510,7 +6488,7 @@ dependencies = [ "secp256k1", "serde", "serde_json", - "strum 0.24.1", + "strum", "thiserror", "tracing", "zksync_basic_types", diff --git a/zk_toolbox/Cargo.toml b/zk_toolbox/Cargo.toml index 138a8e3af129..0c5e2188c66a 100644 --- a/zk_toolbox/Cargo.toml +++ b/zk_toolbox/Cargo.toml @@ -25,12 +25,16 @@ keywords = ["zk", "cryptography", 
"blockchain", "ZKStack", "ZKsync"] common = { path = "crates/common" } config = { path = "crates/config" } types = { path = "crates/types" } + +# ZkSync deps zksync_config = { path = "../core/lib/config" } zksync_protobuf_config = { path = "../core/lib/protobuf_config" } +zksync_basic_types = { path = "../core/lib/basic_types" } +zksync_protobuf = "=0.1.0-rc.4" # External dependencies anyhow = "1.0.82" -clap = { version = "4.4", features = ["derive", "wrap_help"] } +clap = { version = "4.4", features = ["derive", "wrap_help", "string"] } slugify-rs = "0.0.3" cliclack = "0.2.5" console = "0.15.8" @@ -44,8 +48,7 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" serde_yaml = "0.9" sqlx = { version = "0.7.4", features = ["runtime-tokio", "migrate", "postgres"] } -strum = { version = "0.26.2", features = ["derive"] } -strum_macros = "0.26.2" +strum = { version = "0.26", features = ["derive"] } thiserror = "1.0.57" tokio = { version = "1.37", features = ["full"] } toml = "0.8.12" diff --git a/zk_toolbox/crates/common/Cargo.toml b/zk_toolbox/crates/common/Cargo.toml index 6b362905160f..3d9d406f6234 100644 --- a/zk_toolbox/crates/common/Cargo.toml +++ b/zk_toolbox/crates/common/Cargo.toml @@ -22,9 +22,9 @@ serde.workspace = true serde_json.workspace = true serde_yaml.workspace = true sqlx.workspace = true -strum_macros.workspace = true tokio.workspace = true toml.workspace = true url.workspace = true xshell.workspace = true -thiserror = "1.0.57" +thiserror.workspace = true +strum.workspace = true diff --git a/zk_toolbox/crates/common/src/cmd.rs b/zk_toolbox/crates/common/src/cmd.rs index ca0f285882a3..7bf0147b69c0 100644 --- a/zk_toolbox/crates/common/src/cmd.rs +++ b/zk_toolbox/crates/common/src/cmd.rs @@ -19,6 +19,8 @@ use crate::{ pub struct Cmd<'a> { inner: xshell::Cmd<'a>, force_run: bool, + // For resume functionality we must pipe the output, otherwise it only shows less information + piped_std_err: bool, } #[derive(thiserror::Error, Debug)] @@ 
-72,6 +74,7 @@ impl<'a> Cmd<'a> { Self { inner: cmd, force_run: false, + piped_std_err: false, } } @@ -81,6 +84,11 @@ impl<'a> Cmd<'a> { self } + pub fn with_piped_std_err(mut self) -> Self { + self.piped_std_err = true; + self + } + /// Set env variables for the command. pub fn env, V: AsRef>(mut self, key: K, value: V) -> Self { self.inner = self.inner.env(key, value); @@ -93,7 +101,7 @@ impl<'a> Cmd<'a> { let output = if global_config().verbose || self.force_run { logger::debug(format!("Running: {}", self.inner)); logger::new_empty_line(); - let output = run_low_level_process_command(self.inner.into())?; + let output = run_low_level_process_command(self.inner.into(), self.piped_std_err)?; if let Ok(data) = String::from_utf8(output.stderr.clone()) { if !data.is_empty() { logger::info(data) @@ -152,9 +160,13 @@ fn check_output_status(command_text: &str, output: &std::process::Output) -> Cmd Ok(()) } -fn run_low_level_process_command(mut command: Command) -> io::Result { +fn run_low_level_process_command(mut command: Command, piped_std_err: bool) -> io::Result { command.stdout(Stdio::inherit()); - command.stderr(Stdio::piped()); + if piped_std_err { + command.stderr(Stdio::piped()); + } else { + command.stderr(Stdio::inherit()); + } let child = command.spawn()?; child.wait_with_output() } diff --git a/zk_toolbox/crates/common/src/db.rs b/zk_toolbox/crates/common/src/db.rs index c0a681bc74c0..eda5471170dd 100644 --- a/zk_toolbox/crates/common/src/db.rs +++ b/zk_toolbox/crates/common/src/db.rs @@ -27,7 +27,7 @@ impl DatabaseConfig { } /// Create a new `Db` instance from a URL. - pub fn from_url(url: Url) -> anyhow::Result { + pub fn from_url(url: &Url) -> anyhow::Result { let name = url .path_segments() .ok_or(anyhow!("Failed to parse database name from URL"))? 
diff --git a/zk_toolbox/crates/common/src/ethereum.rs b/zk_toolbox/crates/common/src/ethereum.rs index 451bc311145f..c035d588370d 100644 --- a/zk_toolbox/crates/common/src/ethereum.rs +++ b/zk_toolbox/crates/common/src/ethereum.rs @@ -13,7 +13,7 @@ use crate::wallets::Wallet; pub fn create_ethers_client( private_key: H256, l1_rpc: String, - chain_id: Option, + chain_id: Option, ) -> anyhow::Result, ethers::prelude::Wallet>> { let mut wallet = LocalWallet::from_bytes(private_key.as_bytes())?; if let Some(chain_id) = chain_id { @@ -27,7 +27,7 @@ pub async fn distribute_eth( main_wallet: Wallet, addresses: Vec
, l1_rpc: String, - chain_id: u32, + chain_id: u64, amount: u128, ) -> anyhow::Result<()> { let client = create_ethers_client(main_wallet.private_key.unwrap(), l1_rpc, Some(chain_id))?; diff --git a/zk_toolbox/crates/common/src/forge.rs b/zk_toolbox/crates/common/src/forge.rs index de91c0e72500..f00921a0bf20 100644 --- a/zk_toolbox/crates/common/src/forge.rs +++ b/zk_toolbox/crates/common/src/forge.rs @@ -12,7 +12,7 @@ use ethers::{ utils::{hex, hex::ToHex}, }; use serde::{Deserialize, Serialize}; -use strum_macros::Display; +use strum::Display; use xshell::{cmd, Shell}; use crate::{ @@ -62,16 +62,24 @@ impl ForgeScript { if self.args.resume { let mut args = args_no_resume.clone(); args.push(ForgeScriptArg::Resume.to_string()); - let res = Cmd::new(cmd!(shell, "forge script {script_path} --legacy {args...}")).run(); + let res = Cmd::new(cmd!(shell, "forge script {script_path} --legacy {args...}")) + .with_piped_std_err() + .run(); if !res.resume_not_successful_because_has_not_began() { return Ok(res?); } } - let res = Cmd::new(cmd!( + let mut cmd = Cmd::new(cmd!( shell, "forge script {script_path} --legacy {args_no_resume...}" - )) - .run(); + )); + + if self.args.resume { + cmd = cmd.with_piped_std_err(); + } + + let res = cmd.run(); + // We won't catch this error if resume is not set. 
if res.proposal_error() { return Ok(()); } diff --git a/zk_toolbox/crates/config/Cargo.toml b/zk_toolbox/crates/config/Cargo.toml index 32cce24b3152..57fbc778334e 100644 --- a/zk_toolbox/crates/config/Cargo.toml +++ b/zk_toolbox/crates/config/Cargo.toml @@ -19,10 +19,12 @@ rand.workspace = true serde.workspace = true serde_json.workspace = true strum.workspace = true -strum_macros.workspace = true thiserror.workspace = true types.workspace = true url.workspace = true xshell.workspace = true -zksync_config.workspace = true + zksync_protobuf_config.workspace = true +zksync_protobuf.workspace = true +zksync_config.workspace = true +zksync_basic_types.workspace = true diff --git a/zk_toolbox/crates/config/src/chain.rs b/zk_toolbox/crates/config/src/chain.rs index 01dc1cae6434..e8b6df00644d 100644 --- a/zk_toolbox/crates/config/src/chain.rs +++ b/zk_toolbox/crates/config/src/chain.rs @@ -4,20 +4,17 @@ use std::{ }; use serde::{Deserialize, Serialize, Serializer}; -use types::{ - BaseToken, ChainId, L1BatchCommitDataGeneratorMode, L1Network, ProverMode, WalletCreation, -}; +use types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use xshell::Shell; -use zksync_config::configs::GeneralConfig as ZkSyncGeneralConfig; -use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; +use zksync_basic_types::L2ChainId; use crate::{ - consts::{ - CONFIG_NAME, CONTRACTS_FILE, GENERAL_FILE, GENESIS_FILE, L1_CONTRACTS_FOUNDRY, - SECRETS_FILE, WALLETS_FILE, - }, + consts::{CONFIG_NAME, GENERAL_FILE, L1_CONTRACTS_FOUNDRY, SECRETS_FILE, WALLETS_FILE}, create_localhost_wallets, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig, SaveConfigWithBasePath}, + traits::{ + FileConfigWithDefaultName, ReadConfig, ReadConfigWithBasePath, SaveConfig, + SaveConfigWithBasePath, ZkToolboxConfig, + }, ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; @@ -29,12 +26,12 @@ pub struct ChainConfigInternal { // needs for local setups 
only pub id: u32, pub name: String, - pub chain_id: ChainId, + pub chain_id: L2ChainId, pub prover_version: ProverMode, pub configs: PathBuf, pub rocks_db_path: PathBuf, pub external_node_config_path: Option, - pub l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode, + pub l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, pub base_token: BaseToken, pub wallet_creation: WalletCreation, } @@ -45,14 +42,14 @@ pub struct ChainConfigInternal { pub struct ChainConfig { pub id: u32, pub name: String, - pub chain_id: ChainId, + pub chain_id: L2ChainId, pub prover_version: ProverMode, pub l1_network: L1Network, pub link_to_code: PathBuf, pub rocks_db_path: PathBuf, pub configs: PathBuf, pub external_node_config_path: Option, - pub l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode, + pub l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, pub base_token: BaseToken, pub wallet_creation: WalletCreation, pub shell: OnceCell, @@ -73,11 +70,11 @@ impl ChainConfig { } pub fn get_genesis_config(&self) -> anyhow::Result { - GenesisConfig::read(self.get_shell(), self.configs.join(GENESIS_FILE)) + GenesisConfig::read_with_base_path(self.get_shell(), &self.configs) } pub fn get_general_config(&self) -> anyhow::Result { - GeneralConfig::read(self.get_shell(), self.configs.join(GENERAL_FILE)) + GeneralConfig::read_with_base_path(self.get_shell(), &self.configs) } pub fn get_wallets_config(&self) -> anyhow::Result { @@ -93,11 +90,11 @@ impl ChainConfig { anyhow::bail!("Wallets configs has not been found"); } pub fn get_contracts_config(&self) -> anyhow::Result { - ContractsConfig::read(self.get_shell(), self.configs.join(CONTRACTS_FILE)) + ContractsConfig::read_with_base_path(self.get_shell(), &self.configs) } pub fn get_secrets_config(&self) -> anyhow::Result { - SecretsConfig::read(self.get_shell(), self.configs.join(SECRETS_FILE)) + SecretsConfig::read_with_base_path(self.get_shell(), &self.configs) } pub fn 
path_to_general_config(&self) -> PathBuf { @@ -108,23 +105,8 @@ impl ChainConfig { self.configs.join(SECRETS_FILE) } - pub fn get_zksync_general_config(&self) -> anyhow::Result { - decode_yaml_repr::( - &self.configs.join(GENERAL_FILE), - false, - ) - } - - pub fn save_zksync_general_config( - &self, - general_config: &ZkSyncGeneralConfig, - ) -> anyhow::Result<()> { - let path = self.configs.join(GENERAL_FILE); - let bytes = encode_yaml_repr::( - general_config, - )?; - self.get_shell().write_file(path, bytes)?; - Ok(()) + pub fn save_general_config(&self, general_config: &GeneralConfig) -> anyhow::Result<()> { + general_config.save_with_base_path(self.get_shell(), &self.configs) } pub fn path_to_foundry(&self) -> PathBuf { @@ -160,3 +142,5 @@ impl ChainConfig { impl FileConfigWithDefaultName for ChainConfigInternal { const FILE_NAME: &'static str = CONFIG_NAME; } + +impl ZkToolboxConfig for ChainConfigInternal {} diff --git a/zk_toolbox/crates/config/src/consts.rs b/zk_toolbox/crates/config/src/consts.rs index a00274fb35f3..fecb6e78c9a5 100644 --- a/zk_toolbox/crates/config/src/consts.rs +++ b/zk_toolbox/crates/config/src/consts.rs @@ -1,5 +1,3 @@ -use types::ChainId; - /// Name of the main configuration file pub(crate) const CONFIG_NAME: &str = "ZkStack.yaml"; /// Name of the wallets file @@ -35,7 +33,7 @@ pub(crate) const ECOSYSTEM_PATH: &str = "etc/ecosystem"; /// Path to l1 contracts foundry folder inside zksync-era pub(crate) const L1_CONTRACTS_FOUNDRY: &str = "contracts/l1-contracts"; -pub(crate) const ERA_CHAIN_ID: ChainId = ChainId(270); +pub(crate) const ERA_CHAIN_ID: u32 = 270; pub(crate) const TEST_CONFIG_PATH: &str = "etc/test_config/constant/eth.json"; pub(crate) const BASE_PATH: &str = "m/44'/60'/0'"; diff --git a/zk_toolbox/crates/config/src/contracts.rs b/zk_toolbox/crates/config/src/contracts.rs index a4c00a10a455..0a0b3c22ab5b 100644 --- a/zk_toolbox/crates/config/src/contracts.rs +++ b/zk_toolbox/crates/config/src/contracts.rs @@ -8,7 +8,7 @@ 
use crate::{ initialize_bridges::output::InitializeBridgeOutput, register_chain::output::RegisterChainOutput, }, - traits::{FileConfig, FileConfigWithDefaultName}, + traits::{FileConfigWithDefaultName, ZkToolboxConfig}, }; #[derive(Debug, Deserialize, Serialize, Clone, Default)] @@ -89,6 +89,8 @@ impl FileConfigWithDefaultName for ContractsConfig { const FILE_NAME: &'static str = CONTRACTS_FILE; } +impl ZkToolboxConfig for ContractsConfig {} + #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Default)] pub struct EcosystemContracts { pub bridgehub_proxy_addr: Address, @@ -98,7 +100,7 @@ pub struct EcosystemContracts { pub diamond_cut_data: String, } -impl FileConfig for EcosystemContracts {} +impl ZkToolboxConfig for EcosystemContracts {} #[derive(Debug, Serialize, Deserialize, Clone, Default)] pub struct BridgesContracts { diff --git a/zk_toolbox/crates/config/src/ecosystem.rs b/zk_toolbox/crates/config/src/ecosystem.rs index a0121a2b25db..76d8a0c45b22 100644 --- a/zk_toolbox/crates/config/src/ecosystem.rs +++ b/zk_toolbox/crates/config/src/ecosystem.rs @@ -6,8 +6,9 @@ use std::{ use common::logger; use serde::{Deserialize, Serialize, Serializer}; use thiserror::Error; -use types::{ChainId, L1Network, ProverMode, WalletCreation}; +use types::{L1Network, ProverMode, WalletCreation}; use xshell::Shell; +use zksync_basic_types::L2ChainId; use crate::{ consts::{ @@ -17,7 +18,7 @@ use crate::{ }, create_localhost_wallets, forge_interface::deploy_ecosystem::input::{Erc20DeploymentConfig, InitialDeploymentConfig}, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, + traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig, ZkToolboxConfig}, ChainConfig, ChainConfigInternal, ContractsConfig, WalletsConfig, }; @@ -32,7 +33,7 @@ struct EcosystemConfigInternal { pub chains: PathBuf, pub config: PathBuf, pub default_chain: String, - pub era_chain_id: ChainId, + pub era_chain_id: L2ChainId, pub prover_version: ProverMode, pub wallet_creation: 
WalletCreation, } @@ -48,7 +49,7 @@ pub struct EcosystemConfig { pub chains: PathBuf, pub config: PathBuf, pub default_chain: String, - pub era_chain_id: ChainId, + pub era_chain_id: L2ChainId, pub prover_version: ProverMode, pub wallet_creation: WalletCreation, pub shell: OnceCell, @@ -90,6 +91,10 @@ impl FileConfigWithDefaultName for EcosystemConfig { const FILE_NAME: &'static str = CONFIG_NAME; } +impl ZkToolboxConfig for EcosystemConfigInternal {} + +impl ZkToolboxConfig for EcosystemConfig {} + impl EcosystemConfig { fn get_shell(&self) -> &Shell { self.shell.get().expect("Must be initialized") @@ -245,8 +250,8 @@ pub enum EcosystemConfigFromFileError { InvalidConfig { source: anyhow::Error }, } -pub fn get_default_era_chain_id() -> ChainId { - ERA_CHAIN_ID +pub fn get_default_era_chain_id() -> L2ChainId { + L2ChainId::from(ERA_CHAIN_ID) } // Find file in all parents repository and return necessary path or an empty error if nothing has been found diff --git a/zk_toolbox/crates/config/src/external_node.rs b/zk_toolbox/crates/config/src/external_node.rs index 87acb15e4d8c..a07ff5dc1400 100644 --- a/zk_toolbox/crates/config/src/external_node.rs +++ b/zk_toolbox/crates/config/src/external_node.rs @@ -1,23 +1,28 @@ -use std::num::NonZeroUsize; +use std::path::Path; -use serde::{Deserialize, Serialize}; -use types::{ChainId, L1BatchCommitDataGeneratorMode}; +use xshell::Shell; +pub use zksync_config::configs::en_config::ENConfig; +use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; -use crate::{consts::EN_CONFIG_FILE, traits::FileConfigWithDefaultName}; +use crate::{ + consts::EN_CONFIG_FILE, + traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, +}; -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] -pub struct ENConfig { - // Genesis - pub l2_chain_id: ChainId, - pub l1_chain_id: u32, - pub l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode, +impl FileConfigWithDefaultName for ENConfig { + const FILE_NAME: &'static 
str = EN_CONFIG_FILE; +} - // Main node configuration - pub main_node_url: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub main_node_rate_limit_rps: Option, +impl SaveConfig for ENConfig { + fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { + let bytes = encode_yaml_repr::(self)?; + Ok(shell.write_file(path, bytes)?) + } } -impl FileConfigWithDefaultName for ENConfig { - const FILE_NAME: &'static str = EN_CONFIG_FILE; +impl ReadConfig for ENConfig { + fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { + let path = shell.current_dir().join(path); + decode_yaml_repr::(&path, false) + } } diff --git a/zk_toolbox/crates/config/src/forge_interface/accept_ownership/mod.rs b/zk_toolbox/crates/config/src/forge_interface/accept_ownership/mod.rs index 58b5aa1f9d49..636cffc49f89 100644 --- a/zk_toolbox/crates/config/src/forge_interface/accept_ownership/mod.rs +++ b/zk_toolbox/crates/config/src/forge_interface/accept_ownership/mod.rs @@ -1,9 +1,9 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use crate::traits::FileConfig; +use crate::traits::ZkToolboxConfig; -impl FileConfig for AcceptOwnershipInput {} +impl ZkToolboxConfig for AcceptOwnershipInput {} #[derive(Debug, Deserialize, Serialize, Clone)] pub struct AcceptOwnershipInput { diff --git a/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/input.rs b/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/input.rs index e0ad2ac70cdc..0dc117ae8cda 100644 --- a/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/input.rs +++ b/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/input.rs @@ -6,11 +6,11 @@ use ethers::{ }; use rand::Rng; use serde::{Deserialize, Serialize}; -use types::ChainId; +use zksync_basic_types::L2ChainId; use crate::{ consts::INITIAL_DEPLOYMENT_FILE, - traits::{FileConfig, FileConfigWithDefaultName}, + traits::{FileConfigWithDefaultName, ZkToolboxConfig}, ContractsConfig, GenesisConfig, 
WalletsConfig, }; @@ -61,6 +61,8 @@ impl FileConfigWithDefaultName for InitialDeploymentConfig { const FILE_NAME: &'static str = INITIAL_DEPLOYMENT_FILE; } +impl ZkToolboxConfig for InitialDeploymentConfig {} + #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Erc20DeploymentConfig { pub tokens: Vec, @@ -70,6 +72,8 @@ impl FileConfigWithDefaultName for Erc20DeploymentConfig { const FILE_NAME: &'static str = INITIAL_DEPLOYMENT_FILE; } +impl ZkToolboxConfig for Erc20DeploymentConfig {} + impl Default for Erc20DeploymentConfig { fn default() -> Self { Self { @@ -111,21 +115,21 @@ pub struct Erc20DeploymentTokensConfig { #[derive(Debug, Deserialize, Serialize, Clone)] pub struct DeployL1Config { - pub era_chain_id: ChainId, + pub era_chain_id: L2ChainId, pub owner_address: Address, pub testnet_verifier: bool, pub contracts: ContractsDeployL1Config, pub tokens: TokensDeployL1Config, } -impl FileConfig for DeployL1Config {} +impl ZkToolboxConfig for DeployL1Config {} impl DeployL1Config { pub fn new( genesis_config: &GenesisConfig, wallets_config: &WalletsConfig, initial_deployment_config: &InitialDeploymentConfig, - era_chain_id: ChainId, + era_chain_id: L2ChainId, testnet_verifier: bool, ) -> Self { Self { @@ -147,16 +151,17 @@ impl DeployL1Config { .diamond_init_max_pubdata_per_batch, diamond_init_minimal_l2_gas_price: initial_deployment_config .diamond_init_minimal_l2_gas_price, - bootloader_hash: genesis_config.bootloader_hash, - default_aa_hash: genesis_config.default_aa_hash, + bootloader_hash: genesis_config.bootloader_hash.unwrap(), + default_aa_hash: genesis_config.default_aa_hash.unwrap(), diamond_init_priority_tx_max_pubdata: initial_deployment_config .diamond_init_priority_tx_max_pubdata, diamond_init_pubdata_pricing_mode: initial_deployment_config .diamond_init_pubdata_pricing_mode, - genesis_batch_commitment: genesis_config.genesis_batch_commitment, - genesis_rollup_leaf_index: genesis_config.genesis_rollup_leaf_index, - genesis_root: 
genesis_config.genesis_root, - latest_protocol_version: genesis_config.genesis_protocol_semantic_version.pack(), + // These values are not optional in genesis config with file based configuration + genesis_batch_commitment: genesis_config.genesis_commitment.unwrap(), + genesis_rollup_leaf_index: genesis_config.rollup_last_leaf_index.unwrap(), + genesis_root: genesis_config.genesis_root_hash.unwrap(), + latest_protocol_version: genesis_config.protocol_version.unwrap().pack(), recursion_circuits_set_vks_hash: H256::zero(), recursion_leaf_level_vk_hash: H256::zero(), recursion_node_level_vk_hash: H256::zero(), @@ -181,7 +186,7 @@ pub struct ContractsDeployL1Config { pub create2_factory_addr: Option
, pub validator_timelock_execution_delay: u64, pub genesis_root: H256, - pub genesis_rollup_leaf_index: u32, + pub genesis_rollup_leaf_index: u64, pub genesis_batch_commitment: H256, pub latest_protocol_version: U256, pub recursion_node_level_vk_hash: H256, @@ -212,7 +217,7 @@ pub struct DeployErc20Config { pub additional_addresses_for_minting: Vec
, } -impl FileConfig for DeployErc20Config {} +impl ZkToolboxConfig for DeployErc20Config {} impl DeployErc20Config { pub fn new( diff --git a/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs b/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs index 874414ccc1a4..77f212114916 100644 --- a/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs +++ b/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use crate::{ consts::ERC20_CONFIGS_FILE, - traits::{FileConfig, FileConfigWithDefaultName}, + traits::{FileConfigWithDefaultName, ZkToolboxConfig}, }; #[derive(Debug, Deserialize, Serialize, Clone)] @@ -21,7 +21,7 @@ pub struct DeployL1Output { pub deployed_addresses: DeployL1DeployedAddressesOutput, } -impl FileConfig for DeployL1Output {} +impl ZkToolboxConfig for DeployL1Output {} #[derive(Debug, Deserialize, Serialize, Clone)] pub struct DeployL1ContractsConfigOutput { @@ -96,3 +96,5 @@ pub struct DeployErc20Output { impl FileConfigWithDefaultName for DeployErc20Output { const FILE_NAME: &'static str = ERC20_CONFIGS_FILE; } + +impl ZkToolboxConfig for DeployErc20Output {} diff --git a/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/input.rs b/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/input.rs index e884c0a3a39e..d06e36185607 100644 --- a/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/input.rs +++ b/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/input.rs @@ -1,15 +1,15 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use types::ChainId; +use zksync_basic_types::L2ChainId; -use crate::{traits::FileConfig, ChainConfig}; +use crate::{traits::ZkToolboxConfig, ChainConfig}; -impl FileConfig for InitializeBridgeInput {} +impl ZkToolboxConfig for InitializeBridgeInput {} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct InitializeBridgeInput 
{ - pub era_chain_id: ChainId, - pub chain_id: ChainId, + pub era_chain_id: L2ChainId, + pub chain_id: L2ChainId, pub l1_shared_bridge: Address, pub bridgehub: Address, pub governance: Address, @@ -17,7 +17,7 @@ pub struct InitializeBridgeInput { } impl InitializeBridgeInput { - pub fn new(chain_config: &ChainConfig, era_chain_id: ChainId) -> anyhow::Result { + pub fn new(chain_config: &ChainConfig, era_chain_id: L2ChainId) -> anyhow::Result { let contracts = chain_config.get_contracts_config()?; let wallets = chain_config.get_wallets_config()?; Ok(Self { diff --git a/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/output.rs b/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/output.rs index d03474a6a089..8da3707ed706 100644 --- a/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/output.rs +++ b/zk_toolbox/crates/config/src/forge_interface/initialize_bridges/output.rs @@ -1,9 +1,9 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use crate::traits::FileConfig; +use crate::traits::ZkToolboxConfig; -impl FileConfig for InitializeBridgeOutput {} +impl ZkToolboxConfig for InitializeBridgeOutput {} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct InitializeBridgeOutput { diff --git a/zk_toolbox/crates/config/src/forge_interface/paymaster/mod.rs b/zk_toolbox/crates/config/src/forge_interface/paymaster/mod.rs index e634f1eb3dab..9631fe743180 100644 --- a/zk_toolbox/crates/config/src/forge_interface/paymaster/mod.rs +++ b/zk_toolbox/crates/config/src/forge_interface/paymaster/mod.rs @@ -1,12 +1,12 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use types::ChainId; +use zksync_basic_types::L2ChainId; -use crate::{traits::FileConfig, ChainConfig}; +use crate::{traits::ZkToolboxConfig, ChainConfig}; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct DeployPaymasterInput { - pub chain_id: ChainId, + pub chain_id: L2ChainId, pub l1_shared_bridge: Address, pub bridgehub: 
Address, } @@ -21,11 +21,12 @@ impl DeployPaymasterInput { }) } } -impl FileConfig for DeployPaymasterInput {} + +impl ZkToolboxConfig for DeployPaymasterInput {} #[derive(Debug, Serialize, Deserialize, Clone)] pub struct DeployPaymasterOutput { pub paymaster: Address, } -impl FileConfig for DeployPaymasterOutput {} +impl ZkToolboxConfig for DeployPaymasterOutput {} diff --git a/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs b/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs index 3849aa341e2b..29494ba5d8f5 100644 --- a/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs +++ b/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs @@ -1,9 +1,10 @@ use ethers::types::Address; use rand::Rng; use serde::{Deserialize, Serialize}; -use types::{ChainId, L1BatchCommitDataGeneratorMode}; +use types::L1BatchCommitmentMode; +use zksync_basic_types::L2ChainId; -use crate::{traits::FileConfig, ChainConfig, ContractsConfig}; +use crate::{traits::ZkToolboxConfig, ChainConfig, ContractsConfig}; #[derive(Debug, Deserialize, Serialize, Clone)] struct Bridgehub { @@ -14,6 +15,7 @@ struct Bridgehub { struct StateTransition { state_transition_proxy_addr: Address, } + #[derive(Debug, Deserialize, Serialize, Clone)] struct DeployedAddresses { state_transition: StateTransition, @@ -36,7 +38,7 @@ pub struct RegisterChainL1Config { #[derive(Debug, Deserialize, Serialize, Clone)] pub struct ChainL1Config { - pub chain_chain_id: ChainId, + pub chain_chain_id: L2ChainId, pub base_token_addr: Address, pub bridgehub_create_new_chain_salt: u64, pub validium_mode: bool, @@ -48,7 +50,7 @@ pub struct ChainL1Config { pub governance_min_delay: u64, } -impl FileConfig for RegisterChainL1Config {} +impl ZkToolboxConfig for RegisterChainL1Config {} impl RegisterChainL1Config { pub fn new(chain_config: &ChainConfig, contracts: &ContractsConfig) -> anyhow::Result { @@ -80,7 +82,7 @@ impl RegisterChainL1Config { // TODO verify 
bridgehub_create_new_chain_salt: rand::thread_rng().gen_range(0..=i64::MAX) as u64, validium_mode: chain_config.l1_batch_commit_data_generator_mode - == L1BatchCommitDataGeneratorMode::Validium, + == L1BatchCommitmentMode::Validium, validator_sender_operator_commit_eth: wallets_config.operator.address, validator_sender_operator_blobs_eth: wallets_config.blob_operator.address, }, diff --git a/zk_toolbox/crates/config/src/forge_interface/register_chain/output.rs b/zk_toolbox/crates/config/src/forge_interface/register_chain/output.rs index 2f39b76c3933..f9521b16328f 100644 --- a/zk_toolbox/crates/config/src/forge_interface/register_chain/output.rs +++ b/zk_toolbox/crates/config/src/forge_interface/register_chain/output.rs @@ -1,7 +1,7 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use crate::traits::FileConfig; +use crate::traits::ZkToolboxConfig; #[derive(Debug, Deserialize, Serialize, Clone)] pub struct RegisterChainOutput { @@ -10,4 +10,4 @@ pub struct RegisterChainOutput { pub chain_admin_addr: Address, } -impl FileConfig for RegisterChainOutput {} +impl ZkToolboxConfig for RegisterChainOutput {} diff --git a/zk_toolbox/crates/config/src/general.rs b/zk_toolbox/crates/config/src/general.rs index b97384f26f8c..091d18936616 100644 --- a/zk_toolbox/crates/config/src/general.rs +++ b/zk_toolbox/crates/config/src/general.rs @@ -1,57 +1,66 @@ -use std::path::PathBuf; +use std::path::{Path, PathBuf}; -use serde::{Deserialize, Serialize}; +use anyhow::Context; use url::Url; +use xshell::Shell; +pub use zksync_config::configs::GeneralConfig; +use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; -use crate::{consts::GENERAL_FILE, traits::FileConfigWithDefaultName}; +use crate::{ + consts::GENERAL_FILE, + traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, +}; pub struct RocksDbs { pub state_keeper: PathBuf, pub merkle_tree: PathBuf, } -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct GeneralConfig { - pub db: 
RocksDBConfig, - pub eth: EthConfig, - pub api: ApiConfig, - #[serde(flatten)] - pub other: serde_json::Value, +pub fn set_rocks_db_config(config: &mut GeneralConfig, rocks_dbs: RocksDbs) -> anyhow::Result<()> { + config + .db_config + .as_mut() + .context("DB config is not presented")? + .state_keeper_db_path = rocks_dbs.state_keeper.to_str().unwrap().to_string(); + config + .db_config + .as_mut() + .context("DB config is not presented")? + .merkle_tree + .path = rocks_dbs.merkle_tree.to_str().unwrap().to_string(); + Ok(()) } -impl GeneralConfig { - pub fn set_rocks_db_config(&mut self, rocks_dbs: RocksDbs) -> anyhow::Result<()> { - self.db.state_keeper_db_path = rocks_dbs.state_keeper; - self.db.merkle_tree.path = rocks_dbs.merkle_tree; - Ok(()) - } - - pub fn ports_config(&self) -> PortsConfig { - PortsConfig { - web3_json_rpc_http_port: self.api.web3_json_rpc.http_port, - web3_json_rpc_ws_port: self.api.web3_json_rpc.ws_port, - healthcheck_port: self.api.healthcheck.port, - merkle_tree_port: self.api.merkle_tree.port, - prometheus_listener_port: self.api.prometheus.listener_port, - } - } - - pub fn update_ports(&mut self, ports_config: &PortsConfig) -> anyhow::Result<()> { - self.api.web3_json_rpc.http_port = ports_config.web3_json_rpc_http_port; - update_port_in_url( - &mut self.api.web3_json_rpc.http_url, - ports_config.web3_json_rpc_http_port, - )?; - self.api.web3_json_rpc.ws_port = ports_config.web3_json_rpc_ws_port; - update_port_in_url( - &mut self.api.web3_json_rpc.ws_url, - ports_config.web3_json_rpc_ws_port, - )?; - self.api.healthcheck.port = ports_config.healthcheck_port; - self.api.merkle_tree.port = ports_config.merkle_tree_port; - self.api.prometheus.listener_port = ports_config.prometheus_listener_port; - Ok(()) - } +pub fn ports_config(config: &GeneralConfig) -> Option { + let api = config.api_config.as_ref()?; + Some(PortsConfig { + web3_json_rpc_http_port: api.web3_json_rpc.http_port, + web3_json_rpc_ws_port: api.web3_json_rpc.ws_port, + 
healthcheck_port: api.healthcheck.port, + merkle_tree_port: api.merkle_tree.port, + prometheus_listener_port: api.prometheus.listener_port, + }) +} + +pub fn update_ports(config: &mut GeneralConfig, ports_config: &PortsConfig) -> anyhow::Result<()> { + let api = config + .api_config + .as_mut() + .context("Api config is not presented")?; + api.web3_json_rpc.http_port = ports_config.web3_json_rpc_http_port; + update_port_in_url( + &mut api.web3_json_rpc.http_url, + ports_config.web3_json_rpc_http_port, + )?; + api.web3_json_rpc.ws_port = ports_config.web3_json_rpc_ws_port; + update_port_in_url( + &mut api.web3_json_rpc.ws_url, + ports_config.web3_json_rpc_ws_port, + )?; + api.healthcheck.port = ports_config.healthcheck_port; + api.merkle_tree.port = ports_config.merkle_tree_port; + api.prometheus.listener_port = ports_config.prometheus_listener_port; + Ok(()) } fn update_port_in_url(http_url: &mut String, port: u16) -> anyhow::Result<()> { @@ -67,101 +76,6 @@ impl FileConfigWithDefaultName for GeneralConfig { const FILE_NAME: &'static str = GENERAL_FILE; } -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct RocksDBConfig { - pub state_keeper_db_path: PathBuf, - pub merkle_tree: MerkleTreeDB, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct MerkleTreeDB { - pub path: PathBuf, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct EthConfig { - pub sender: EthSender, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct EthSender { - pub proof_sending_mode: String, - pub pubdata_sending_mode: String, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct ApiConfig { - /// Configuration options for the Web3 JSON RPC servers. 
- pub web3_json_rpc: Web3JsonRpcConfig, - /// Configuration options for the Prometheus exporter. - pub prometheus: PrometheusConfig, - /// Configuration options for the Health check. - pub healthcheck: HealthCheckConfig, - /// Configuration options for Merkle tree API. - pub merkle_tree: MerkleTreeApiConfig, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct Web3JsonRpcConfig { - /// Port to which the HTTP RPC server is listening. - pub http_port: u16, - /// URL to access HTTP RPC server. - pub http_url: String, - /// Port to which the WebSocket RPC server is listening. - pub ws_port: u16, - /// URL to access WebSocket RPC server. - pub ws_url: String, - /// Max possible limit of entities to be requested once. - pub req_entities_limit: Option, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct PrometheusConfig { - /// Port to which the Prometheus exporter server is listening. - pub listener_port: u16, - /// URL of the push gateway. - pub pushgateway_url: String, - /// Push interval in ms. - pub push_interval_ms: Option, - #[serde(flatten)] - pub other: serde_json::Value, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct HealthCheckConfig { - /// Port to which the REST server is listening. - pub port: u16, - /// Time limit in milliseconds to mark a health check as slow and log the corresponding warning. - /// If not specified, the default value in the health check crate will be used. - pub slow_time_limit_ms: Option, - /// Time limit in milliseconds to abort a health check and return "not ready" status for the corresponding component. - /// If not specified, the default value in the health check crate will be used. - pub hard_time_limit_ms: Option, - #[serde(flatten)] - pub other: serde_json::Value, -} - -/// Configuration for the Merkle tree API. 
-#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct MerkleTreeApiConfig { - /// Port to bind the Merkle tree API server to. - pub port: u16, - #[serde(flatten)] - pub other: serde_json::Value, -} - pub struct PortsConfig { pub web3_json_rpc_http_port: u16, pub web3_json_rpc_ws_port: u16, @@ -181,3 +95,18 @@ impl PortsConfig { } } } + +impl SaveConfig for GeneralConfig { + fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { + let bytes = + encode_yaml_repr::(self)?; + Ok(shell.write_file(path, bytes)?) + } +} + +impl ReadConfig for GeneralConfig { + fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { + let path = shell.current_dir().join(path); + decode_yaml_repr::(&path, false) + } +} diff --git a/zk_toolbox/crates/config/src/genesis.rs b/zk_toolbox/crates/config/src/genesis.rs index e666931870a8..1df1da85fe1f 100644 --- a/zk_toolbox/crates/config/src/genesis.rs +++ b/zk_toolbox/crates/config/src/genesis.rs @@ -1,34 +1,36 @@ -use ethers::types::{Address, H256}; -use serde::{Deserialize, Serialize}; -use types::{ChainId, L1BatchCommitDataGeneratorMode, ProtocolSemanticVersion}; +use std::path::Path; -use crate::{consts::GENESIS_FILE, traits::FileConfigWithDefaultName, ChainConfig}; +use xshell::Shell; +use zksync_basic_types::L1ChainId; +pub use zksync_config::GenesisConfig; +use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct GenesisConfig { - pub l2_chain_id: ChainId, - pub l1_chain_id: u32, - pub l1_batch_commit_data_generator_mode: Option, - pub bootloader_hash: H256, - pub default_aa_hash: H256, - pub fee_account: Address, - pub genesis_batch_commitment: H256, - pub genesis_rollup_leaf_index: u32, - pub genesis_root: H256, - pub genesis_protocol_version: u64, - pub genesis_protocol_semantic_version: ProtocolSemanticVersion, - #[serde(flatten)] - pub other: serde_json::Value, -} +use crate::{ + consts::GENESIS_FILE, + 
traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, + ChainConfig, +}; -impl GenesisConfig { - pub fn update_from_chain_config(&mut self, config: &ChainConfig) { - self.l2_chain_id = config.chain_id; - self.l1_chain_id = config.l1_network.chain_id(); - self.l1_batch_commit_data_generator_mode = Some(config.l1_batch_commit_data_generator_mode); - } +pub fn update_from_chain_config(genesis: &mut GenesisConfig, config: &ChainConfig) { + genesis.l2_chain_id = config.chain_id; + genesis.l1_chain_id = L1ChainId(config.l1_network.chain_id()); + genesis.l1_batch_commit_data_generator_mode = config.l1_batch_commit_data_generator_mode; } impl FileConfigWithDefaultName for GenesisConfig { const FILE_NAME: &'static str = GENESIS_FILE; } + +impl SaveConfig for GenesisConfig { + fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { + let bytes = encode_yaml_repr::(self)?; + Ok(shell.write_file(path, bytes)?) + } +} + +impl ReadConfig for GenesisConfig { + fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { + let path = shell.current_dir().join(path); + decode_yaml_repr::(&path, false) + } +} diff --git a/zk_toolbox/crates/config/src/lib.rs b/zk_toolbox/crates/config/src/lib.rs index a80a2b6fe5de..47d4040eb6bf 100644 --- a/zk_toolbox/crates/config/src/lib.rs +++ b/zk_toolbox/crates/config/src/lib.rs @@ -9,6 +9,7 @@ pub use manipulations::*; pub use secrets::*; pub use wallet_creation::*; pub use wallets::*; +pub use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; mod chain; mod consts; diff --git a/zk_toolbox/crates/config/src/secrets.rs b/zk_toolbox/crates/config/src/secrets.rs index 98a9be6ffe61..5bcad19ad339 100644 --- a/zk_toolbox/crates/config/src/secrets.rs +++ b/zk_toolbox/crates/config/src/secrets.rs @@ -1,48 +1,54 @@ +use std::{path::Path, str::FromStr}; + +use anyhow::Context; use common::db::DatabaseConfig; -use serde::{Deserialize, Serialize}; -use url::Url; +use xshell::Shell; +use 
zksync_basic_types::url::SensitiveUrl; +pub use zksync_config::configs::Secrets as SecretsConfig; +use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; -use crate::{consts::SECRETS_FILE, traits::FileConfigWithDefaultName}; +use crate::{ + consts::SECRETS_FILE, + traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, +}; -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DatabaseSecrets { - pub server_url: Url, - #[serde(skip_serializing_if = "Option::is_none")] - pub prover_url: Option, - #[serde(flatten)] - pub other: serde_json::Value, +pub fn set_databases( + secrets: &mut SecretsConfig, + server_db_config: &DatabaseConfig, + prover_db_config: &DatabaseConfig, +) -> anyhow::Result<()> { + let database = secrets + .database + .as_mut() + .context("Databases must be presented")?; + database.server_url = Some(SensitiveUrl::from(server_db_config.full_url())); + database.prover_url = Some(SensitiveUrl::from(prover_db_config.full_url())); + Ok(()) } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct L1Secret { - pub l1_rpc_url: String, - #[serde(flatten)] - pub other: serde_json::Value, +pub fn set_l1_rpc_url(secrets: &mut SecretsConfig, l1_rpc_url: String) -> anyhow::Result<()> { + secrets + .l1 + .as_mut() + .context("L1 Secrets must be presented")? 
+ .l1_rpc_url = SensitiveUrl::from_str(&l1_rpc_url)?; + Ok(()) } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SecretsConfig { - pub database: DatabaseSecrets, - pub l1: L1Secret, - #[serde(flatten)] - pub other: serde_json::Value, +impl FileConfigWithDefaultName for SecretsConfig { + const FILE_NAME: &'static str = SECRETS_FILE; } -impl SecretsConfig { - pub fn set_databases( - &mut self, - server_db_config: &DatabaseConfig, - prover_db_config: &DatabaseConfig, - ) { - self.database.server_url = server_db_config.full_url(); - self.database.prover_url = Some(prover_db_config.full_url()); - } - - pub fn set_l1_rpc_url(&mut self, l1_rpc_url: String) { - self.l1.l1_rpc_url = l1_rpc_url; +impl SaveConfig for SecretsConfig { + fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { + let bytes = encode_yaml_repr::(self)?; + Ok(shell.write_file(path, bytes)?) } } -impl FileConfigWithDefaultName for SecretsConfig { - const FILE_NAME: &'static str = SECRETS_FILE; +impl ReadConfig for SecretsConfig { + fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { + let path = shell.current_dir().join(path); + decode_yaml_repr::(&path, false) + } } diff --git a/zk_toolbox/crates/config/src/traits.rs b/zk_toolbox/crates/config/src/traits.rs index 772c5d964dab..1f00b39b040a 100644 --- a/zk_toolbox/crates/config/src/traits.rs +++ b/zk_toolbox/crates/config/src/traits.rs @@ -7,7 +7,8 @@ use common::files::{ use serde::{de::DeserializeOwned, Serialize}; use xshell::Shell; -pub trait FileConfig {} +// Configs that we use only inside zk toolbox, we don't have protobuf implementation for them. 
+pub trait ZkToolboxConfig {} pub trait FileConfigWithDefaultName { const FILE_NAME: &'static str; @@ -17,25 +18,38 @@ pub trait FileConfigWithDefaultName { } } -impl FileConfig for T where T: FileConfigWithDefaultName {} - -impl SaveConfig for T where T: FileConfig + Serialize {} - -impl SaveConfigWithComment for T where T: FileConfig + Serialize {} +impl SaveConfig for T { + fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { + save_with_comment(shell, path, self, "") + } +} -impl ReadConfigWithBasePath for T where T: FileConfigWithDefaultName + Clone + DeserializeOwned {} +impl ReadConfigWithBasePath for T +where + T: FileConfigWithDefaultName + Clone + ReadConfig, +{ + fn read_with_base_path(shell: &Shell, base_path: impl AsRef) -> anyhow::Result { + ::read(shell, base_path.as_ref().join(Self::FILE_NAME)) + } +} -impl SaveConfigWithBasePath for T where T: FileConfigWithDefaultName + Serialize {} +impl SaveConfigWithBasePath for T where T: FileConfigWithDefaultName + SaveConfig {} -impl SaveConfigWithCommentAndBasePath for T where T: FileConfigWithDefaultName + Serialize {} +impl SaveConfigWithCommentAndBasePath for T where + T: FileConfigWithDefaultName + Serialize + SaveConfig +{ +} +/// Reads a config file from a given path, correctly parsing file extension. +/// Supported file extensions are: `yaml`, `yml`, `toml`, `json`. pub trait ReadConfig: Sized { fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result; } -/// Reads a config file from a given path, correctly parsing file extension. -/// Supported file extensions are: `yaml`, `yml`, `toml`, `json`. -impl ReadConfig for T { +impl ReadConfig for T +where + T: DeserializeOwned + Clone + ZkToolboxConfig, +{ fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { let error_context = || format!("Failed to parse config file {:?}.", path.as_ref()); @@ -53,18 +67,14 @@ impl ReadConfig for T { /// Reads a config file from a base path, correctly parsing file extension. 
/// Supported file extensions are: `yaml`, `yml`, `toml`, `json`. -pub trait ReadConfigWithBasePath: ReadConfig + FileConfigWithDefaultName { - fn read_with_base_path(shell: &Shell, base_path: impl AsRef) -> anyhow::Result { - ::read(shell, base_path.as_ref().join(Self::FILE_NAME)) - } +pub trait ReadConfigWithBasePath: ReadConfig + FileConfigWithDefaultName + Clone { + fn read_with_base_path(shell: &Shell, base_path: impl AsRef) -> anyhow::Result; } /// Saves a config file to a given path, correctly parsing file extension. /// Supported file extensions are: `yaml`, `yml`, `toml`, `json`. -pub trait SaveConfig: Serialize + Sized { - fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { - save_with_comment(shell, path, self, "") - } +pub trait SaveConfig { + fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()>; } /// Saves a config file from a base path, correctly parsing file extension. @@ -81,7 +91,16 @@ pub trait SaveConfigWithBasePath: SaveConfig + FileConfigWithDefaultName { /// Saves a config file to a given path, correctly parsing file extension. /// Supported file extensions are: `yaml`, `yml`, `toml`. 
-pub trait SaveConfigWithComment: Serialize + Sized { +pub trait SaveConfigWithComment: Sized { + fn save_with_comment( + &self, + shell: &Shell, + path: impl AsRef, + comment: &str, + ) -> anyhow::Result<()>; +} + +impl SaveConfigWithComment for T { fn save_with_comment( &self, shell: &Shell, diff --git a/zk_toolbox/crates/config/src/wallets.rs b/zk_toolbox/crates/config/src/wallets.rs index 91958195c232..460c4e3574a3 100644 --- a/zk_toolbox/crates/config/src/wallets.rs +++ b/zk_toolbox/crates/config/src/wallets.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use crate::{ consts::WALLETS_FILE, - traits::{FileConfig, FileConfigWithDefaultName}, + traits::{FileConfigWithDefaultName, ZkToolboxConfig}, }; #[derive(Debug, Clone, Serialize, Deserialize)] @@ -60,4 +60,6 @@ pub(crate) struct EthMnemonicConfig { pub(crate) base_path: String, } -impl FileConfig for EthMnemonicConfig {} +impl ZkToolboxConfig for EthMnemonicConfig {} + +impl ZkToolboxConfig for WalletsConfig {} diff --git a/zk_toolbox/crates/types/Cargo.toml b/zk_toolbox/crates/types/Cargo.toml index efd8f84d7088..7dc96cab768d 100644 --- a/zk_toolbox/crates/types/Cargo.toml +++ b/zk_toolbox/crates/types/Cargo.toml @@ -15,5 +15,5 @@ clap.workspace = true ethers.workspace = true serde.workspace = true strum.workspace = true -strum_macros.workspace = true thiserror.workspace = true +zksync_basic_types.workspace = true diff --git a/zk_toolbox/crates/types/src/chain_id.rs b/zk_toolbox/crates/types/src/chain_id.rs deleted file mode 100644 index 258175d3fde5..000000000000 --- a/zk_toolbox/crates/types/src/chain_id.rs +++ /dev/null @@ -1,18 +0,0 @@ -use std::fmt::Display; - -use serde::{Deserialize, Serialize}; - -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -pub struct ChainId(pub u32); - -impl Display for ChainId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -impl From for ChainId { - fn 
from(value: u32) -> Self { - Self(value) - } -} diff --git a/zk_toolbox/crates/types/src/l1_batch_commit_data_generator_mode.rs b/zk_toolbox/crates/types/src/l1_batch_commit_data_generator_mode.rs deleted file mode 100644 index cdb8f5919c2d..000000000000 --- a/zk_toolbox/crates/types/src/l1_batch_commit_data_generator_mode.rs +++ /dev/null @@ -1,22 +0,0 @@ -use clap::ValueEnum; -use serde::{Deserialize, Serialize}; -use strum_macros::EnumIter; - -#[derive( - Debug, - Serialize, - Deserialize, - Clone, - Copy, - ValueEnum, - EnumIter, - strum_macros::Display, - Default, - PartialEq, - Eq, -)] -pub enum L1BatchCommitDataGeneratorMode { - #[default] - Rollup, - Validium, -} diff --git a/zk_toolbox/crates/types/src/l1_network.rs b/zk_toolbox/crates/types/src/l1_network.rs index f7367673f6c8..822235611a33 100644 --- a/zk_toolbox/crates/types/src/l1_network.rs +++ b/zk_toolbox/crates/types/src/l1_network.rs @@ -1,6 +1,6 @@ use clap::ValueEnum; use serde::{Deserialize, Serialize}; -use strum_macros::EnumIter; +use strum::EnumIter; #[derive( Copy, @@ -15,7 +15,7 @@ use strum_macros::EnumIter; Deserialize, ValueEnum, EnumIter, - strum_macros::Display, + strum::Display, )] pub enum L1Network { #[default] @@ -26,7 +26,7 @@ pub enum L1Network { impl L1Network { #[must_use] - pub fn chain_id(&self) -> u32 { + pub fn chain_id(&self) -> u64 { match self { L1Network::Localhost => 9, L1Network::Sepolia => 11_155_111, diff --git a/zk_toolbox/crates/types/src/lib.rs b/zk_toolbox/crates/types/src/lib.rs index c405013990cf..4cc7f160a45b 100644 --- a/zk_toolbox/crates/types/src/lib.rs +++ b/zk_toolbox/crates/types/src/lib.rs @@ -1,15 +1,12 @@ mod base_token; -mod chain_id; -mod l1_batch_commit_data_generator_mode; mod l1_network; -mod protocol_version; mod prover_mode; mod wallet_creation; pub use base_token::*; -pub use chain_id::*; -pub use l1_batch_commit_data_generator_mode::*; pub use l1_network::*; -pub use protocol_version::ProtocolSemanticVersion; pub use prover_mode::*; pub use 
wallet_creation::*; +pub use zksync_basic_types::{ + commitment::L1BatchCommitmentMode, protocol_version::ProtocolSemanticVersion, +}; diff --git a/zk_toolbox/crates/types/src/protocol_version.rs b/zk_toolbox/crates/types/src/protocol_version.rs deleted file mode 100644 index 5b619c883a3e..000000000000 --- a/zk_toolbox/crates/types/src/protocol_version.rs +++ /dev/null @@ -1,87 +0,0 @@ -use std::{fmt, num::ParseIntError, str::FromStr}; - -use ethers::prelude::U256; -use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; - -pub const PACKED_SEMVER_MINOR_OFFSET: u32 = 32; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct ProtocolSemanticVersion { - pub minor: u16, - pub patch: u16, -} - -impl ProtocolSemanticVersion { - const MAJOR_VERSION: u8 = 0; - - pub fn new(minor: u16, patch: u16) -> Self { - Self { minor, patch } - } - - pub fn pack(&self) -> U256 { - (U256::from(self.minor) << U256::from(PACKED_SEMVER_MINOR_OFFSET)) | U256::from(self.patch) - } -} - -impl fmt::Display for ProtocolSemanticVersion { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}.{}.{}", Self::MAJOR_VERSION, self.minor, self.patch) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ParseProtocolSemanticVersionError { - #[error("invalid format")] - InvalidFormat, - #[error("non zero major version")] - NonZeroMajorVersion, - #[error("{0}")] - ParseIntError(ParseIntError), -} - -impl FromStr for ProtocolSemanticVersion { - type Err = ParseProtocolSemanticVersionError; - - fn from_str(s: &str) -> Result { - let parts: Vec<&str> = s.split('.').collect(); - if parts.len() != 3 { - return Err(ParseProtocolSemanticVersionError::InvalidFormat); - } - - let major = parts[0] - .parse::() - .map_err(ParseProtocolSemanticVersionError::ParseIntError)?; - if major != 0 { - return Err(ParseProtocolSemanticVersionError::NonZeroMajorVersion); - } - - let minor = parts[1] - .parse::() - 
.map_err(ParseProtocolSemanticVersionError::ParseIntError)?; - - let patch = parts[2] - .parse::() - .map_err(ParseProtocolSemanticVersionError::ParseIntError)?; - - Ok(ProtocolSemanticVersion { minor, patch }) - } -} - -impl<'de> Deserialize<'de> for ProtocolSemanticVersion { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - ProtocolSemanticVersion::from_str(&s).map_err(D::Error::custom) - } -} - -impl Serialize for ProtocolSemanticVersion { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} diff --git a/zk_toolbox/crates/types/src/prover_mode.rs b/zk_toolbox/crates/types/src/prover_mode.rs index d9b4fb965e8c..b6667402ba26 100644 --- a/zk_toolbox/crates/types/src/prover_mode.rs +++ b/zk_toolbox/crates/types/src/prover_mode.rs @@ -1,21 +1,11 @@ use clap::ValueEnum; use serde::{Deserialize, Serialize}; -use strum_macros::EnumIter; +use strum::EnumIter; #[derive( - Debug, - Serialize, - Deserialize, - Clone, - Copy, - ValueEnum, - EnumIter, - strum_macros::Display, - PartialEq, - Eq, + Debug, Serialize, Deserialize, Clone, Copy, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, )] pub enum ProverMode { NoProofs, Gpu, - Cpu, } diff --git a/zk_toolbox/crates/types/src/wallet_creation.rs b/zk_toolbox/crates/types/src/wallet_creation.rs index 43cb5e969b93..8457d109e43b 100644 --- a/zk_toolbox/crates/types/src/wallet_creation.rs +++ b/zk_toolbox/crates/types/src/wallet_creation.rs @@ -1,6 +1,6 @@ use clap::ValueEnum; use serde::{Deserialize, Serialize}; -use strum_macros::EnumIter; +use strum::EnumIter; #[derive( Clone, @@ -15,7 +15,7 @@ use strum_macros::EnumIter; Deserialize, ValueEnum, EnumIter, - strum_macros::Display, + strum::Display, )] pub enum WalletCreation { /// Load wallets from localhost mnemonic, they are funded for localhost env diff --git a/zk_toolbox/crates/zk_inception/Cargo.toml 
b/zk_toolbox/crates/zk_inception/Cargo.toml index 3a8b57e162f6..2626611376fe 100644 --- a/zk_toolbox/crates/zk_inception/Cargo.toml +++ b/zk_toolbox/crates/zk_inception/Cargo.toml @@ -26,10 +26,10 @@ ethers.workspace = true common.workspace = true tokio.workspace = true types.workspace = true -strum_macros.workspace = true strum.workspace = true toml.workspace = true url.workspace = true thiserror.workspace = true zksync_config.workspace = true slugify-rs.workspace = true +zksync_basic_types.workspace = true diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs index 97a3de69c738..63f3a3736cd7 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs @@ -1,12 +1,11 @@ use std::{path::PathBuf, str::FromStr}; -use clap::Parser; +use clap::{Parser, ValueEnum}; use common::{Prompt, PromptConfirm, PromptSelect}; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; -use strum::IntoEnumIterator; -use strum_macros::{Display, EnumIter}; -use types::{BaseToken, L1BatchCommitDataGeneratorMode, L1Network, ProverMode, WalletCreation}; +use strum::{Display, EnumIter, IntoEnumIterator}; +use types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use crate::{ defaults::L2_CHAIN_ID, @@ -23,28 +22,44 @@ use crate::{ }, }; +// We need to duplicate it for using enum inside the arguments +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)] +enum L1BatchCommitmentModeInternal { + Rollup, + Validium, +} + +impl From for L1BatchCommitmentMode { + fn from(val: L1BatchCommitmentModeInternal) -> Self { + match val { + L1BatchCommitmentModeInternal::Rollup => L1BatchCommitmentMode::Rollup, + L1BatchCommitmentModeInternal::Validium => L1BatchCommitmentMode::Validium, + } + } +} + #[derive(Debug, Serialize, Deserialize, Parser)] pub struct 
ChainCreateArgs { #[arg(long)] - pub chain_name: Option, + chain_name: Option, #[arg(value_parser = clap::value_parser ! (u32).range(1..))] - pub chain_id: Option, + chain_id: Option, #[clap(long, help = MSG_PROVER_MODE_HELP, value_enum)] - pub prover_mode: Option, + prover_mode: Option, #[clap(long, help = MSG_WALLET_CREATION_HELP, value_enum)] - pub wallet_creation: Option, + wallet_creation: Option, #[clap(long, help = MSG_WALLET_PATH_HELP)] - pub wallet_path: Option, + wallet_path: Option, #[clap(long, help = MSG_L1_COMMIT_DATA_GENERATOR_MODE_HELP)] - pub l1_batch_commit_data_generator_mode: Option, + l1_batch_commit_data_generator_mode: Option, #[clap(long, help = MSG_BASE_TOKEN_ADDRESS_HELP)] - pub base_token_address: Option, + base_token_address: Option, #[clap(long, help = MSG_BASE_TOKEN_PRICE_NOMINATOR_HELP)] - pub base_token_price_nominator: Option, + base_token_price_nominator: Option, #[clap(long, help = MSG_BASE_TOKEN_PRICE_DENOMINATOR_HELP)] - pub base_token_price_denominator: Option, + base_token_price_denominator: Option, #[clap(long, help = MSG_SET_AS_DEFAULT_HELP, default_missing_value = "true", num_args = 0..=1)] - pub set_as_default: Option, + pub(crate) set_as_default: Option, } impl ChainCreateArgs { @@ -81,7 +96,7 @@ impl ChainCreateArgs { let l1_batch_commit_data_generator_mode = PromptSelect::new( MSG_L1_BATCH_COMMIT_DATA_GENERATOR_MODE_PROMPT, - L1BatchCommitDataGeneratorMode::iter(), + L1BatchCommitmentModeInternal::iter(), ) .ask(); @@ -139,7 +154,7 @@ impl ChainCreateArgs { chain_id, prover_version, wallet_creation, - l1_batch_commit_data_generator_mode, + l1_batch_commit_data_generator_mode: l1_batch_commit_data_generator_mode.into(), wallet_path, base_token, set_as_default, @@ -153,7 +168,7 @@ pub struct ChainCreateArgsFinal { pub chain_id: u32, pub prover_version: ProverMode, pub wallet_creation: WalletCreation, - pub l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode, + pub l1_batch_commit_data_generator_mode: 
L1BatchCommitmentMode, pub wallet_path: Option, pub base_token: BaseToken, pub set_as_default: bool, diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs index dc8f408db3b3..70f4442cca62 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs @@ -5,8 +5,8 @@ use config::{ create_local_configs_dir, create_wallets, traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig, }; -use types::ChainId; use xshell::Shell; +use zksync_basic_types::L2ChainId; use crate::{ commands::chain::args::create::{ChainCreateArgs, ChainCreateArgsFinal}, @@ -62,7 +62,7 @@ pub(crate) fn create_chain_inner( let chain_config = ChainConfig { id: chain_id, name: default_chain_name.clone(), - chain_id: ChainId::from(args.chain_id), + chain_id: L2ChainId::from(args.chain_id), prover_version: args.prover_version, l1_network: ecosystem_config.l1_network, link_to_code: ecosystem_config.link_to_code.clone(), diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs index 4f82a92c2edc..81ac457cd884 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs @@ -15,7 +15,9 @@ use config::{ use xshell::Shell; use crate::{ - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_DEPLOYING_PAYMASTER}, + messages::{ + MSG_CHAIN_NOT_INITIALIZED, MSG_DEPLOYING_PAYMASTER, MSG_L1_SECRETS_MUST_BE_PRESENTED, + }, utils::forge::{check_the_balance, fill_forge_private_key}, }; @@ -47,7 +49,14 @@ pub async fn deploy_paymaster( let mut forge = Forge::new(&foundry_contracts_path) .script(&DEPLOY_PAYMASTER_SCRIPT_PARAMS.script(), forge_args.clone()) .with_ffi() - .with_rpc_url(secrets.l1.l1_rpc_url.clone()) + .with_rpc_url( + secrets + .l1 + 
.context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? + .l1_rpc_url + .expose_str() + .to_string(), + ) .with_broadcast(); forge = fill_forge_private_key( diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs index b42a1138229d..3cd5440ba83d 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs @@ -9,12 +9,14 @@ use common::{ spinner::Spinner, }; use config::{ + set_databases, set_rocks_db_config, traits::{FileConfigWithDefaultName, SaveConfigWithBasePath}, ChainConfig, ContractsConfig, EcosystemConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; use types::ProverMode; use xshell::Shell; +use zksync_config::configs::eth_sender::ProofSendingMode; use super::args::genesis::GenesisArgsFinal; use crate::{ @@ -55,14 +57,21 @@ pub async fn genesis( let rocks_db = recreate_rocksdb_dirs(shell, &config.rocks_db_path, RocksDBDirOption::Main) .context(MSG_RECREATE_ROCKS_DB_ERRROR)?; let mut general = config.get_general_config()?; - general.set_rocks_db_config(rocks_db)?; + set_rocks_db_config(&mut general, rocks_db)?; if config.prover_version != ProverMode::NoProofs { - general.eth.sender.proof_sending_mode = "ONLY_REAL_PROOFS".to_string(); + general + .eth + .as_mut() + .context("eth")? + .sender + .as_mut() + .context("sender")? 
+ .proof_sending_mode = ProofSendingMode::OnlyRealProofs; } general.save_with_base_path(shell, &config.configs)?; let mut secrets = config.get_secrets_config()?; - secrets.set_databases(&args.server_db, &args.prover_db); + set_databases(&mut secrets, &args.server_db, &args.prover_db)?; secrets.save_with_base_path(shell, &config.configs)?; logger::note( diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs index 640f4a492869..bf44b5d5c999 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs @@ -11,8 +11,9 @@ use config::{ register_chain::{input::RegisterChainL1Config, output::RegisterChainOutput}, script_params::REGISTER_CHAIN_SCRIPT_PARAMS, }, + set_l1_rpc_url, traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, - ChainConfig, ContractsConfig, EcosystemConfig, + update_from_chain_config, ChainConfig, ContractsConfig, EcosystemConfig, }; use xshell::Shell; @@ -59,7 +60,7 @@ pub async fn init( copy_configs(shell, &ecosystem_config.link_to_code, &chain_config.configs)?; let mut genesis_config = chain_config.get_genesis_config()?; - genesis_config.update_from_chain_config(chain_config); + update_from_chain_config(&mut genesis_config, chain_config); genesis_config.save_with_base_path(shell, &chain_config.configs)?; // Copy ecosystem contracts @@ -74,7 +75,7 @@ pub async fn init( ) .await?; let mut secrets = chain_config.get_secrets_config()?; - secrets.set_l1_rpc_url(init_args.l1_rpc_url.clone()); + set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; secrets.save_with_base_path(shell, &chain_config.configs)?; let spinner = Spinner::new(MSG_REGISTERING_CHAIN_SPINNER); diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/initialize_bridges.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/initialize_bridges.rs index e81971eba7cb..b60daa9dbd61 100644 --- 
a/zk_toolbox/crates/zk_inception/src/commands/chain/initialize_bridges.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/initialize_bridges.rs @@ -18,7 +18,10 @@ use config::{ use xshell::{cmd, Shell}; use crate::{ - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_INITIALIZING_BRIDGES_SPINNER}, + messages::{ + MSG_CHAIN_NOT_INITIALIZED, MSG_INITIALIZING_BRIDGES_SPINNER, + MSG_L1_SECRETS_MUST_BE_PRESENTED, + }, utils::forge::{check_the_balance, fill_forge_private_key}, }; @@ -67,7 +70,14 @@ pub async fn initialize_bridges( forge_args.clone(), ) .with_ffi() - .with_rpc_url(secrets.l1.l1_rpc_url.clone()) + .with_rpc_url( + secrets + .l1 + .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? + .l1_rpc_url + .expose_str() + .to_string(), + ) .with_broadcast(); forge = fill_forge_private_key( diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs index d3d5fe129678..746558dd4e97 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/create.rs @@ -5,8 +5,7 @@ use clap::Parser; use common::{cmd::Cmd, logger, Prompt, PromptConfirm, PromptSelect}; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; -use strum::IntoEnumIterator; -use strum_macros::EnumIter; +use strum::{EnumIter, IntoEnumIterator}; use types::{L1Network, WalletCreation}; use xshell::{cmd, Shell}; @@ -32,7 +31,9 @@ pub struct EcosystemCreateArgs { #[clap(flatten)] #[serde(flatten)] pub chain: ChainCreateArgs, - #[clap(long, help = MSG_START_CONTAINERS_HELP, default_missing_value = "true", num_args = 0..=1)] + #[clap( + long, help = MSG_START_CONTAINERS_HELP, default_missing_value = "true", num_args = 0..=1 + )] pub start_containers: Option, } diff --git a/zk_toolbox/crates/zk_inception/src/commands/external_node/init.rs b/zk_toolbox/crates/zk_inception/src/commands/external_node/init.rs index c6101e88739c..28c3e80aaab0 
100644 --- a/zk_toolbox/crates/zk_inception/src/commands/external_node/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/external_node/init.rs @@ -10,8 +10,9 @@ use xshell::Shell; use crate::{ consts::SERVER_MIGRATIONS, messages::{ - MSG_CHAIN_NOT_INITIALIZED, MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED, - MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR, MSG_INITIALIZING_DATABASES_SPINNER, + MSG_CHAIN_NOT_INITIALIZED, MSG_DATABASE_MUST_BE_PRESENTED, + MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED, MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR, + MSG_INITIALIZING_DATABASES_SPINNER, }, utils::rocks_db::{recreate_rocksdb_dirs, RocksDBDirOption}, }; @@ -36,7 +37,14 @@ pub async fn init(shell: &Shell, chain_config: &ChainConfig) -> anyhow::Result<( .clone() .context(MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED)?, )?; - let db_config = DatabaseConfig::from_url(secrets.database.server_url)?; + let db_config = DatabaseConfig::from_url( + secrets + .database + .as_ref() + .context(MSG_DATABASE_MUST_BE_PRESENTED)? + .master_url()? 
+ .expose_url(), + )?; drop_db_if_exists(&db_config) .await .context(MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR)?; diff --git a/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs b/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs index 09e9d1b460c4..b799a68aeb8f 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs @@ -1,12 +1,14 @@ -use std::path::Path; +use std::{path::Path, str::FromStr}; use anyhow::Context; use common::{config::global_config, logger}; use config::{ - external_node::ENConfig, traits::SaveConfigWithBasePath, ChainConfig, DatabaseSecrets, - EcosystemConfig, L1Secret, SecretsConfig, + external_node::ENConfig, ports_config, set_rocks_db_config, traits::SaveConfigWithBasePath, + update_ports, ChainConfig, EcosystemConfig, SecretsConfig, }; use xshell::Shell; +use zksync_basic_types::url::SensitiveUrl; +use zksync_config::configs::{DatabaseSecrets, L1Secrets}; use crate::{ commands::external_node::args::prepare_configs::{PrepareConfigArgs, PrepareConfigFinal}, @@ -48,30 +50,39 @@ fn prepare_configs( let en_config = ENConfig { l2_chain_id: genesis.l2_chain_id, l1_chain_id: genesis.l1_chain_id, - l1_batch_commit_data_generator_mode: genesis - .l1_batch_commit_data_generator_mode - .unwrap_or_default(), - main_node_url: general.api.web3_json_rpc.http_url.clone(), + l1_batch_commit_data_generator_mode: genesis.l1_batch_commit_data_generator_mode, + main_node_url: SensitiveUrl::from_str( + &general + .api_config + .as_ref() + .context("api_config")? + .web3_json_rpc + .http_url, + )?, main_node_rate_limit_rps: None, }; let mut general_en = general.clone(); - general_en.update_ports(&general.ports_config().next_empty_ports_config())?; + + update_ports( + &mut general_en, + &ports_config(&general) + .context("da")? 
+ .next_empty_ports_config(), + )?; let secrets = SecretsConfig { - database: DatabaseSecrets { - server_url: args.db.full_url(), + consensus: None, + database: Some(DatabaseSecrets { + server_url: Some(args.db.full_url().into()), prover_url: None, - other: Default::default(), - }, - l1: L1Secret { - l1_rpc_url: args.l1_rpc_url.clone(), - other: Default::default(), - }, - other: Default::default(), + server_replica_url: None, + }), + l1: Some(L1Secrets { + l1_rpc_url: SensitiveUrl::from_str(&args.l1_rpc_url).context("l1_rpc_url")?, + }), }; secrets.save_with_base_path(shell, en_configs_path)?; let dirs = recreate_rocksdb_dirs(shell, &config.rocks_db_path, RocksDBDirOption::ExternalNode)?; - general_en.set_rocks_db_config(dirs)?; - + set_rocks_db_config(&mut general_en, dirs)?; general_en.save_with_base_path(shell, en_configs_path)?; en_config.save_with_base_path(shell, en_configs_path)?; diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs index c398b1852c61..4943c596a1d6 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs @@ -1,8 +1,7 @@ use clap::{Parser, ValueEnum}; use common::{logger, Prompt, PromptConfirm, PromptSelect}; use serde::{Deserialize, Serialize}; -use strum::IntoEnumIterator; -use strum_macros::EnumIter; +use strum::{EnumIter, IntoEnumIterator}; use xshell::Shell; use super::init_bellman_cuda::InitBellmanCudaArgs; @@ -55,7 +54,7 @@ pub struct ProverInitArgs { pub setup_key_config: SetupKeyConfigTmp, } -#[derive(Debug, Clone, ValueEnum, EnumIter, strum_macros::Display, PartialEq, Eq)] +#[derive(Debug, Clone, ValueEnum, EnumIter, strum::Display, PartialEq, Eq)] #[allow(clippy::upper_case_acronyms)] enum ProofStoreConfig { Local, diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs 
index 678c548cea64..4b485099cc80 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs @@ -13,7 +13,7 @@ pub struct ProverRunArgs { } #[derive( - Debug, Clone, ValueEnum, strum::EnumString, EnumIter, PartialEq, Eq, Copy, strum_macros::Display, + Debug, Clone, ValueEnum, strum::EnumString, EnumIter, PartialEq, Eq, Copy, strum::Display, )] pub enum ProverComponent { #[strum(to_string = "Gateway")] @@ -34,9 +34,7 @@ pub struct WitnessGeneratorArgs { pub round: Option, } -#[derive( - Debug, Clone, ValueEnum, strum::EnumString, EnumIter, PartialEq, Eq, strum_macros::Display, -)] +#[derive(Debug, Clone, ValueEnum, strum::EnumString, EnumIter, PartialEq, Eq, strum::Display)] pub enum WitnessGeneratorRound { #[strum(to_string = "All rounds")] AllRounds, diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs index 31785338bf3e..54b53b8576db 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs @@ -29,7 +29,7 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( .load_chain(Some(ecosystem_config.default_chain.clone())) .context(MSG_CHAIN_NOT_FOUND_ERR)?; let mut general_config = chain_config - .get_zksync_general_config() + .get_general_config() .context(MSG_GENERAL_CONFIG_NOT_FOUND_ERR)?; let setup_key_path = get_default_setup_key_path(&ecosystem_config)?; @@ -67,7 +67,7 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( proof_compressor_config.universal_setup_path = args.setup_key_config.setup_key_path; general_config.proof_compressor_config = Some(proof_compressor_config); - chain_config.save_zksync_general_config(&general_config)?; + chain_config.save_general_config(&general_config)?; init_bellman_cuda(shell, args.bellman_cuda_config).await?; diff --git 
a/zk_toolbox/crates/zk_inception/src/messages.rs b/zk_toolbox/crates/zk_inception/src/messages.rs index a33143b4bd66..555aade78cbb 100644 --- a/zk_toolbox/crates/zk_inception/src/messages.rs +++ b/zk_toolbox/crates/zk_inception/src/messages.rs @@ -129,6 +129,8 @@ pub(super) const MSG_CREATING_CHAIN_CONFIGURATIONS_SPINNER: &str = "Creating chain configurations..."; /// Chain genesis related messages +pub(super) const MSG_L1_SECRETS_MUST_BE_PRESENTED: &str = "L1 secret must be presented"; +pub(super) const MSG_DATABASE_MUST_BE_PRESENTED: &str = "Database secret must be presented"; pub(super) const MSG_SERVER_DB_URL_HELP: &str = "Server database url without database name"; pub(super) const MSG_SERVER_DB_NAME_HELP: &str = "Server database name"; pub(super) const MSG_PROVER_DB_URL_HELP: &str = "Prover database url without database name"; diff --git a/zk_toolbox/crates/zk_supervisor/Cargo.toml b/zk_toolbox/crates/zk_supervisor/Cargo.toml index d8f5d7862a04..54d9a819a7b9 100644 --- a/zk_toolbox/crates/zk_supervisor/Cargo.toml +++ b/zk_toolbox/crates/zk_supervisor/Cargo.toml @@ -17,7 +17,6 @@ common.workspace = true config.workspace = true human-panic.workspace = true strum.workspace = true -strum_macros.workspace = true tokio.workspace = true url.workspace = true xshell.workspace = true diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs index ef053ca50c77..64b7a507abea 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs @@ -1,7 +1,6 @@ use clap::{Parser, ValueEnum}; use common::{Prompt, PromptSelect}; -use strum::IntoEnumIterator; -use strum_macros::{Display, EnumIter}; +use strum::{Display, EnumIter, IntoEnumIterator}; use crate::messages::{ MSG_DATABASE_NEW_MIGRATION_DATABASE_HELP, MSG_DATABASE_NEW_MIGRATION_DB_PROMPT, diff --git 
a/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs index fb6996b40ee3..075f21d3b1a3 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs @@ -35,7 +35,7 @@ pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> pub async fn drop_database(dal: Dal) -> anyhow::Result<()> { let spinner = Spinner::new(&msg_database_loading(MSG_DATABASE_DROP_GERUND, &dal.path)); - let db = DatabaseConfig::from_url(dal.url)?; + let db = DatabaseConfig::from_url(&dal.url)?; drop_db_if_exists(&db).await?; spinner.finish(); Ok(()) diff --git a/zk_toolbox/crates/zk_supervisor/src/dals.rs b/zk_toolbox/crates/zk_supervisor/src/dals.rs index 854a6b979494..2d2af41500b4 100644 --- a/zk_toolbox/crates/zk_supervisor/src/dals.rs +++ b/zk_toolbox/crates/zk_supervisor/src/dals.rs @@ -4,7 +4,7 @@ use config::{EcosystemConfig, SecretsConfig}; use url::Url; use xshell::Shell; -use crate::messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_PROVER_URL_MUST_BE_PRESENTED}; +use crate::messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_DATABASE_MUST_BE_PRESENTED}; const CORE_DAL_PATH: &str = "core/lib/dal"; const PROVER_DAL_PATH: &str = "prover/crates/lib/prover_dal"; @@ -48,8 +48,10 @@ pub fn get_prover_dal(shell: &Shell) -> anyhow::Result { path: PROVER_DAL_PATH.to_string(), url: secrets .database - .prover_url - .context(MSG_PROVER_URL_MUST_BE_PRESENTED)? + .as_ref() + .context(MSG_DATABASE_MUST_BE_PRESENTED)? + .prover_url()? + .expose_url() .clone(), }) } @@ -59,7 +61,13 @@ pub fn get_core_dal(shell: &Shell) -> anyhow::Result { Ok(Dal { path: CORE_DAL_PATH.to_string(), - url: secrets.database.server_url.clone(), + url: secrets + .database + .as_ref() + .context(MSG_DATABASE_MUST_BE_PRESENTED)? + .master_url()? 
+ .expose_url() + .clone(), }) } diff --git a/zk_toolbox/crates/zk_supervisor/src/messages.rs b/zk_toolbox/crates/zk_supervisor/src/messages.rs index 863f1c4b1aef..ecbe604a7ba3 100644 --- a/zk_toolbox/crates/zk_supervisor/src/messages.rs +++ b/zk_toolbox/crates/zk_supervisor/src/messages.rs @@ -37,9 +37,7 @@ pub(super) const MSG_DATABASE_RESET_GERUND: &str = "Resetting"; pub(super) const MSG_DATABASE_RESET_PAST: &str = "reset"; pub(super) const MSG_DATABASE_SETUP_GERUND: &str = "Setting up"; pub(super) const MSG_DATABASE_SETUP_PAST: &str = "set up"; - -pub(super) const MSG_PROVER_URL_MUST_BE_PRESENTED: &str = "Prover url must be presented"; - +pub(super) const MSG_DATABASE_MUST_BE_PRESENTED: &str = "Database config must be presented"; pub(super) const MSG_DATABASE_COMMON_PROVER_HELP: &str = "Prover database"; pub(super) const MSG_DATABASE_COMMON_CORE_HELP: &str = "Core database"; pub(super) const MSG_DATABASE_NEW_MIGRATION_DATABASE_HELP: &str = @@ -96,6 +94,7 @@ pub(super) const MSG_REVERT_TEST_ENABLE_CONSENSUS_HELP: &str = "Enable consensus pub(super) const MSG_REVERT_TEST_INSTALLING_DEPENDENCIES: &str = "Building and installing dependencies. This process may take a lot of time..."; pub(super) const MSG_REVERT_TEST_RUN_INFO: &str = "Running revert and restart test"; + pub(super) fn msg_revert_tests_run(external_node: bool) -> String { let base = "Running integration tests"; if external_node { From 245d71983f3ae7e4b7c8760ecbb7c9893f116971 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 26 Jul 2024 13:51:41 +0400 Subject: [PATCH 46/52] fix(zk): Do not cause recompilation when running 'zk test rust' (#2506) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Right now, we're running `cargo sqlx prepare --check` for local databases always. It's justified for `zk db setup`, because we compile with `SQLX_OFFLINE = true` to prevent stale `.sqlx-data`, but it makes no sense for unit tests. 
## Why ❔ Recompilations are long. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- infrastructure/zk/src/database.ts | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/infrastructure/zk/src/database.ts b/infrastructure/zk/src/database.ts index c818bd3be93f..f83227ed37e5 100644 --- a/infrastructure/zk/src/database.ts +++ b/infrastructure/zk/src/database.ts @@ -56,7 +56,7 @@ async function resetTestDal(dalPath: DalPath, dbUrl: string) { await utils.spawn('docker compose -f docker-compose-unit-tests.yml up -d'); await waitForDal(dbUrl, 100); console.log('setting up a database template'); - await setupForDal(dalPath, dbUrl); + await setupForDal(dalPath, dbUrl, false); console.log('disallowing connections to the template'); await utils.spawn( `psql "${dbUrl}" -c "update pg_database set datallowconn = false where datname = current_database()"` @@ -119,7 +119,7 @@ export async function generateMigration(dbType: DbType, name: string) { process.chdir(process.env.ZKSYNC_HOME as string); } -export async function setupForDal(dalPath: DalPath, dbUrl: string) { +export async function setupForDal(dalPath: DalPath, dbUrl: string, shouldCheck: boolean = false) { process.chdir(dalPath); const localDbUrl = 'postgres://postgres:notsecurepassword@localhost'; if (dbUrl.startsWith(localDbUrl)) { @@ -132,7 +132,8 @@ export async function setupForDal(dalPath: DalPath, dbUrl: string) { await utils.spawn(`cargo sqlx migrate run --database-url ${dbUrl}`); const isLocalSetup = process.env.ZKSYNC_LOCAL_SETUP; - if (dbUrl.startsWith(localDbUrl) && !isLocalSetup) { + shouldCheck = shouldCheck && dbUrl.startsWith(localDbUrl) && !isLocalSetup; + if (shouldCheck) { // Dont't do this preparation for local (docker) setup - as it requires full cargo 
compilation. await utils.spawn( `cargo sqlx prepare --check --database-url ${dbUrl} -- --tests || cargo sqlx prepare --database-url ${dbUrl} -- --tests` @@ -145,7 +146,7 @@ export async function setupForDal(dalPath: DalPath, dbUrl: string) { export async function setup(opts: DbOpts) { let dals = getDals(opts); for (const [dalPath, dbUrl] of dals.entries()) { - await setupForDal(dalPath, dbUrl); + await setupForDal(dalPath, dbUrl, true); } } From 21fbd77b8c4379b180abcd296a6c74697967acd8 Mon Sep 17 00:00:00 2001 From: Danil Date: Fri, 26 Jul 2024 13:03:41 +0200 Subject: [PATCH 47/52] fix(zk_toolbox): Set proper pubdata sending mode (#2507) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Fix the bug, where we don't send correct pub data with validium ## Why ❔ ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--------- Signed-off-by: Danil --- etc/env/file_based/general.yaml | 15 ++++++++++----- .../zk_inception/src/commands/chain/genesis.rs | 15 ++++++++++++++- 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 34ca63e4a99c..2765f1a7319d 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -63,7 +63,7 @@ api: estimate_gas_scale_factor: 1.2 estimate_gas_acceptable_overestimation: 1000 max_tx_size: 1000000 - api_namespaces: [ eth,net,web3,zks,pubsub,debug ] + api_namespaces: [eth,net,web3,zks,pubsub,debug] max_response_body_size_overrides: - method: eth_getTransactionReceipt # no size specified, meaning no size limit - method: zks_getProof @@ -130,7 +130,7 @@ eth: aggregated_block_execute_deadline: 10 timestamp_criteria_max_allowed_lag: 30 max_eth_tx_data_size: 120000 - aggregated_proof_sizes: [ 1 ] + aggregated_proof_sizes: [1] max_aggregated_tx_gas: 4000000 max_acceptable_priority_fee_in_gwei: 100000000000 pubdata_sending_mode: BLOBS @@ -343,9 +343,9 @@ protective_reads_writer: first_processed_batch: 0 basic_witness_input_producer: - db_path: "./db/main/basic_witness_input_producer" - window_size: 3 - first_processed_batch: 0 + db_path: "./db/main/basic_witness_input_producer" + window_size: 3 + first_processed_batch: 0 snapshot_recovery: enabled: false @@ -369,3 +369,8 @@ core_object_store: file_backed: file_backed_base_path: artifacts max_retries: 10 + +da_dispatcher: + polling_interval_ms: 5000 + max_rows_to_dispatch: 100 + max_retries: 5 diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs index 3cd5440ba83d..4adf1b3b7553 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs @@ -16,7 +16,8 @@ use config::{ }; use types::ProverMode; use xshell::Shell; -use 
zksync_config::configs::eth_sender::ProofSendingMode; +use zksync_basic_types::commitment::L1BatchCommitmentMode; +use zksync_config::configs::eth_sender::{ProofSendingMode, PubdataSendingMode}; use super::args::genesis::GenesisArgsFinal; use crate::{ @@ -68,6 +69,18 @@ pub async fn genesis( .context("sender")? .proof_sending_mode = ProofSendingMode::OnlyRealProofs; } + + if config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Validium { + general + .eth + .as_mut() + .context("eth")? + .sender + .as_mut() + .context("sender")? + .pubdata_sending_mode = PubdataSendingMode::Custom + } + general.save_with_base_path(shell, &config.configs)?; let mut secrets = config.get_secrets_config()?; From 2fa2249dca15b1968fceec11e485850395f03c9d Mon Sep 17 00:00:00 2001 From: Joonatan Saarhelo Date: Fri, 26 Jul 2024 12:40:57 +0100 Subject: [PATCH 48/52] fix: VM performance diff: don't show 0 as N/A (#2276) No difference in number of instructions executed now shows as 0 and N/A is only shown when data is missing for some reason. --- .../vm-benchmark/src/compare_iai_results.rs | 41 +++++++++---------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/core/tests/vm-benchmark/src/compare_iai_results.rs b/core/tests/vm-benchmark/src/compare_iai_results.rs index b9b6440704cc..d2c9d73f7e36 100644 --- a/core/tests/vm-benchmark/src/compare_iai_results.rs +++ b/core/tests/vm-benchmark/src/compare_iai_results.rs @@ -23,7 +23,7 @@ fn main() { .keys() .collect::>() .intersection(&iai_after.keys().collect()) - .flat_map(|&name| { + .filter_map(|&name| { let diff = percent_difference(iai_before[name], iai_after[name]); if diff.abs() > 2. 
{ Some((name, format!("{:+.1}%", diff))) @@ -32,35 +32,25 @@ fn main() { } }) .collect::>(); + let duration_changes = opcodes_before .keys() .collect::>() .intersection(&opcodes_after.keys().collect()) - .flat_map(|&name| { + .map(|&name| { let opcodes_abs_diff = (opcodes_after[name] as i64) - (opcodes_before[name] as i64); - - if opcodes_abs_diff != 0 { - Some(( - name, - format!( - "{:+} ({:+.1}%)", - opcodes_abs_diff, - percent_difference(opcodes_before[name], opcodes_after[name]) - ), - )) - } else { - None - } + (name, opcodes_abs_diff) }) .collect::>(); let mut nonzero_diff = false; - for name in perf_changes - .keys() - .collect::>() - .union(&duration_changes.keys().collect()) - { + for name in perf_changes.keys().collect::>().union( + &duration_changes + .iter() + .filter_map(|(key, value)| (*value != 0).then_some(key)) + .collect(), + ) { // write the header before writing the first line of diff if !nonzero_diff { println!("Benchmark name | change in estimated runtime | change in number of opcodes executed \n--- | --- | ---"); @@ -71,8 +61,15 @@ fn main() { println!( "{} | {} | {}", name, - perf_changes.get(**name).unwrap_or(&n_a), - duration_changes.get(**name).unwrap_or(&n_a), + perf_changes.get(**name).unwrap_or(&n_a.clone()), + duration_changes + .get(**name) + .map(|abs_diff| format!( + "{:+} ({:+.1}%)", + abs_diff, + percent_difference(opcodes_before[**name], opcodes_after[**name]) + )) + .unwrap_or(n_a), ); } From 0c0d10af703d3f8958c49d0ed46d6cda64945fa1 Mon Sep 17 00:00:00 2001 From: Manuel Mauro Date: Fri, 26 Jul 2024 17:11:37 +0300 Subject: [PATCH 49/52] feat: Add recovery tests to zk_supervisor (#2444) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add recovery tests to zk_supervisor ## Why ❔ ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. 
- [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. --------- Signed-off-by: Danil Co-authored-by: Manuel Co-authored-by: Danil --- .github/workflows/ci-zk-toolbox-reusable.yml | 13 ++- core/tests/recovery-test/src/index.ts | 42 ++++----- core/tests/recovery-test/src/utils.ts | 71 ++++++++++++++ .../tests/genesis-recovery.test.ts | 59 +++++++----- .../tests/snapshot-recovery.test.ts | 92 +++++++++++++++---- core/tests/recovery-test/tests/utils.ts | 47 ++++++++++ etc/env/file_based/general.yaml | 10 +- .../crates/zk_supervisor/src/commands/mod.rs | 1 + .../zk_supervisor/src/commands/snapshot.rs | 40 ++++++++ .../src/commands/test/args/mod.rs | 1 + .../src/commands/test/args/recovery.rs | 10 ++ .../zk_supervisor/src/commands/test/mod.rs | 10 +- .../src/commands/test/recovery.rs | 52 +++++++++++ zk_toolbox/crates/zk_supervisor/src/main.rs | 5 +- .../crates/zk_supervisor/src/messages.rs | 10 ++ 15 files changed, 395 insertions(+), 68 deletions(-) create mode 100644 core/tests/recovery-test/src/utils.ts create mode 100644 core/tests/recovery-test/tests/utils.ts create mode 100644 zk_toolbox/crates/zk_supervisor/src/commands/snapshot.rs create mode 100644 zk_toolbox/crates/zk_supervisor/src/commands/test/args/recovery.rs create mode 100644 zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs diff --git a/.github/workflows/ci-zk-toolbox-reusable.yml b/.github/workflows/ci-zk-toolbox-reusable.yml index 8c8434c6711a..4a4b467e4197 100644 --- a/.github/workflows/ci-zk-toolbox-reusable.yml +++ b/.github/workflows/ci-zk-toolbox-reusable.yml @@ -113,11 +113,22 @@ jobs: run: | ci_run zk_supervisor test integration --ignore-prerequisites --verbose - - name: Run external node server + - name: Init external node server run: | ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@postgres:5432 \ --db-name=zksync_en_localhost_era --l1-rpc-url=http://reth:8545 ci_run zk_inception 
external-node init --ignore-prerequisites + + - name: Run recovery tests (from snapshot) + run: | + ci_run zk_supervisor test recovery --snapshot --ignore-prerequisites --verbose + + - name: Run recovery tests (from genesis) + run: | + ci_run zk_supervisor test recovery --ignore-prerequisites --verbose + + - name: Run external node server + run: | ci_run zk_inception external-node run --ignore-prerequisites &>external_node.log & ci_run sleep 5 diff --git a/core/tests/recovery-test/src/index.ts b/core/tests/recovery-test/src/index.ts index 9e30a6d7831e..5fbac69ace6e 100644 --- a/core/tests/recovery-test/src/index.ts +++ b/core/tests/recovery-test/src/index.ts @@ -10,6 +10,7 @@ import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import path from 'node:path'; import { expect } from 'chai'; +import { runExternalNodeInBackground } from './utils'; export interface Health { readonly status: string; @@ -65,11 +66,9 @@ export async function sleep(millis: number) { await new Promise((resolve) => setTimeout(resolve, millis)); } -export async function getExternalNodeHealth() { - const EXTERNAL_NODE_HEALTH_URL = 'http://127.0.0.1:3081/health'; - +export async function getExternalNodeHealth(url: string) { try { - const response: HealthCheckResponse = await fetch(EXTERNAL_NODE_HEALTH_URL).then((response) => response.json()); + const response: HealthCheckResponse = await fetch(url).then((response) => response.json()); return response; } catch (e) { let displayedError = e; @@ -84,12 +83,13 @@ export async function getExternalNodeHealth() { } } -export async function dropNodeDatabase(env: { [key: string]: string }) { - await executeNodeCommand(env, 'zk db reset'); -} - -export async function dropNodeStorage(env: { [key: string]: string }) { - await executeNodeCommand(env, 'zk clean --database'); +export async function dropNodeData(useZkSupervisor: boolean, env: { [key: string]: string }) { + if (useZkSupervisor) { + await executeNodeCommand(env, 'zk_inception 
external-node init'); + } else { + await executeNodeCommand(env, 'zk db reset'); + await executeNodeCommand(env, 'zk clean --database'); + } } async function executeNodeCommand(env: { [key: string]: string }, command: string) { @@ -127,15 +127,6 @@ export enum NodeComponents { WITH_TREE_FETCHER_AND_NO_TREE = 'core,api,tree_fetcher' } -function externalNodeArgs(components: NodeComponents = NodeComponents.STANDARD) { - const enableConsensus = process.env.ENABLE_CONSENSUS === 'true'; - const args = ['external-node', '--', `--components=${components}`]; - if (enableConsensus) { - args.push('--enable-consensus'); - } - return args; -} - export class NodeProcess { static async stopAll(signal: 'INT' | 'KILL' = 'INT') { interface ChildProcessError extends Error { @@ -157,15 +148,20 @@ export class NodeProcess { static async spawn( env: { [key: string]: string }, logsFile: FileHandle | string, + pathToHome: string, + useZkInception: boolean, components: NodeComponents = NodeComponents.STANDARD ) { const logs = typeof logsFile === 'string' ? 
await fs.open(logsFile, 'w') : logsFile; - const childProcess = spawn('zk', externalNodeArgs(components), { - cwd: process.env.ZKSYNC_HOME!!, + + let childProcess = runExternalNodeInBackground({ + components: [components], stdio: [null, logs.fd, logs.fd], - shell: true, - env + cwd: pathToHome, + env, + useZkInception }); + return new NodeProcess(childProcess, logs); } diff --git a/core/tests/recovery-test/src/utils.ts b/core/tests/recovery-test/src/utils.ts new file mode 100644 index 000000000000..cfec302e94f4 --- /dev/null +++ b/core/tests/recovery-test/src/utils.ts @@ -0,0 +1,71 @@ +import { spawn as _spawn, ChildProcessWithoutNullStreams, type ProcessEnvOptions } from 'child_process'; + +// executes a command in background and returns a child process handle +// by default pipes data to parent's stdio but this can be overridden +export function background({ + command, + stdio = 'inherit', + cwd, + env +}: { + command: string; + stdio: any; + cwd?: ProcessEnvOptions['cwd']; + env?: ProcessEnvOptions['env']; +}): ChildProcessWithoutNullStreams { + command = command.replace(/\n/g, ' '); + console.log(`Running command in background: ${command}`); + return _spawn(command, { stdio: stdio, shell: true, detached: true, cwd, env }); +} + +export function runInBackground({ + command, + components, + stdio, + cwd, + env +}: { + command: string; + components?: string[]; + stdio: any; + cwd?: Parameters[0]['cwd']; + env?: Parameters[0]['env']; +}): ChildProcessWithoutNullStreams { + if (components && components.length > 0) { + command += ` --components=${components.join(',')}`; + } + + return background({ + command, + stdio, + cwd, + env + }); +} + +export function runExternalNodeInBackground({ + components, + stdio, + cwd, + env, + useZkInception +}: { + components?: string[]; + stdio: any; + cwd?: Parameters[0]['cwd']; + env?: Parameters[0]['env']; + useZkInception?: boolean; +}): ChildProcessWithoutNullStreams { + let command = ''; + if (useZkInception) { + command = 
'zk_inception external-node run'; + } else { + command = 'zk external-node --'; + + const enableConsensus = process.env.ENABLE_CONSENSUS === 'true'; + if (enableConsensus) { + command += ' --enable-consensus'; + } + } + return runInBackground({ command, components, stdio, cwd, env }); +} diff --git a/core/tests/recovery-test/tests/genesis-recovery.test.ts b/core/tests/recovery-test/tests/genesis-recovery.test.ts index 8ba9fc2fc79e..54b9699788f2 100644 --- a/core/tests/recovery-test/tests/genesis-recovery.test.ts +++ b/core/tests/recovery-test/tests/genesis-recovery.test.ts @@ -2,15 +2,12 @@ import { expect } from 'chai'; import * as zksync from 'zksync-ethers'; import { ethers } from 'ethers'; -import { - NodeProcess, - dropNodeDatabase, - dropNodeStorage, - getExternalNodeHealth, - NodeComponents, - sleep, - FundedWallet -} from '../src'; +import { NodeProcess, dropNodeData, getExternalNodeHealth, NodeComponents, sleep, FundedWallet } from '../src'; +import { loadConfig, shouldLoadConfigFromFile } from 'utils/build/file-configs'; +import path from 'path'; + +const pathToHome = path.join(__dirname, '../../../..'); +const fileConfig = shouldLoadConfigFromFile(); /** * Tests recovery of an external node from scratch. 
@@ -43,18 +40,38 @@ describe('genesis recovery', () => { let externalNodeProcess: NodeProcess; let externalNodeBatchNumber: number; + let apiWeb3JsonRpcHttpUrl: string; + let ethRpcUrl: string; + let externalNodeUrl: string; + let extNodeHealthUrl: string; + before('prepare environment', async () => { expect(process.env.ZKSYNC_ENV, '`ZKSYNC_ENV` should not be set to allow running both server and EN components') .to.be.undefined; - mainNode = new zksync.Provider('http://127.0.0.1:3050'); - externalNode = new zksync.Provider('http://127.0.0.1:3060'); + + if (fileConfig.loadFromFile) { + const secretsConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'secrets.yaml' }); + const generalConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'general.yaml' }); + + ethRpcUrl = secretsConfig.l1.l1_rpc_url; + apiWeb3JsonRpcHttpUrl = generalConfig.api.web3_json_rpc.http_url; + externalNodeUrl = 'http://127.0.0.1:3150'; + extNodeHealthUrl = 'http://127.0.0.1:3171/health'; + } else { + ethRpcUrl = process.env.ETH_CLIENT_WEB3_URL ?? 'http://127.0.0.1:8545'; + apiWeb3JsonRpcHttpUrl = 'http://127.0.0.1:3050'; + externalNodeUrl = 'http://127.0.0.1:3060'; + extNodeHealthUrl = 'http://127.0.0.1:3081/health'; + } + + mainNode = new zksync.Provider(apiWeb3JsonRpcHttpUrl); + externalNode = new zksync.Provider(externalNodeUrl); await NodeProcess.stopAll('KILL'); }); let fundedWallet: FundedWallet; before('create test wallet', async () => { - const ethRpcUrl = process.env.ETH_CLIENT_WEB3_URL ?? 
'http://127.0.0.1:8545'; console.log(`Using L1 RPC at ${ethRpcUrl}`); const eth = new ethers.JsonRpcProvider(ethRpcUrl); fundedWallet = await FundedWallet.create(mainNode, eth); @@ -78,18 +95,16 @@ describe('genesis recovery', () => { } }); - step('drop external node database', async () => { - await dropNodeDatabase(externalNodeEnv); - }); - - step('drop external node storage', async () => { - await dropNodeStorage(externalNodeEnv); + step('drop external node data', async () => { + await dropNodeData(fileConfig.loadFromFile, externalNodeEnv); }); step('initialize external node w/o a tree', async () => { externalNodeProcess = await NodeProcess.spawn( externalNodeEnv, 'genesis-recovery.log', + pathToHome, + fileConfig.loadFromFile, NodeComponents.WITH_TREE_FETCHER_AND_NO_TREE ); @@ -103,7 +118,7 @@ describe('genesis recovery', () => { while (!treeFetcherSucceeded || !reorgDetectorSucceeded || !consistencyCheckerSucceeded) { await sleep(1000); - const health = await getExternalNodeHealth(); + const health = await getExternalNodeHealth(extNodeHealthUrl); if (health === null) { continue; } @@ -170,13 +185,15 @@ describe('genesis recovery', () => { externalNodeProcess = await NodeProcess.spawn( externalNodeEnv, externalNodeProcess.logs, + pathToHome, + fileConfig.loadFromFile, NodeComponents.WITH_TREE_FETCHER ); let isNodeReady = false; while (!isNodeReady) { await sleep(1000); - const health = await getExternalNodeHealth(); + const health = await getExternalNodeHealth(extNodeHealthUrl); if (health === null) { continue; } @@ -197,7 +214,7 @@ describe('genesis recovery', () => { while (!treeSucceeded || !reorgDetectorSucceeded || !consistencyCheckerSucceeded) { await sleep(1000); - const health = await getExternalNodeHealth(); + const health = await getExternalNodeHealth(extNodeHealthUrl); if (health === null) { continue; } diff --git a/core/tests/recovery-test/tests/snapshot-recovery.test.ts b/core/tests/recovery-test/tests/snapshot-recovery.test.ts index 
f0bd1d83d432..bd508b0045c1 100644 --- a/core/tests/recovery-test/tests/snapshot-recovery.test.ts +++ b/core/tests/recovery-test/tests/snapshot-recovery.test.ts @@ -11,11 +11,21 @@ import { sleep, NodeComponents, NodeProcess, - dropNodeDatabase, - dropNodeStorage, + dropNodeData, executeCommandWithLogs, FundedWallet } from '../src'; +import { + setChunkSize, + setDataRetentionSec, + setRemovalDelaySec, + setSnapshotRecovery, + setTreeRecoveryParallelPersistenceBuffer +} from './utils'; +import { loadConfig, shouldLoadConfigFromFile } from 'utils/build/file-configs'; + +const pathToHome = path.join(__dirname, '../../../..'); +const fileConfig = shouldLoadConfigFromFile(); interface AllSnapshotsResponse { readonly snapshotsL1BatchNumbers: number[]; @@ -90,16 +100,39 @@ describe('snapshot recovery', () => { let fundedWallet: FundedWallet; + let apiWeb3JsonRpcHttpUrl: string; + let ethRpcUrl: string; + let externalNodeUrl: string; + let extNodeHealthUrl: string; + before('prepare environment', async () => { expect(process.env.ZKSYNC_ENV, '`ZKSYNC_ENV` should not be set to allow running both server and EN components') .to.be.undefined; - mainNode = new zksync.Provider('http://127.0.0.1:3050'); - externalNode = new zksync.Provider('http://127.0.0.1:3060'); + + if (fileConfig.loadFromFile) { + const secretsConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'secrets.yaml' }); + const generalConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'general.yaml' }); + + ethRpcUrl = secretsConfig.l1.l1_rpc_url; + apiWeb3JsonRpcHttpUrl = generalConfig.api.web3_json_rpc.http_url; + externalNodeUrl = 'http://127.0.0.1:3150'; + extNodeHealthUrl = 'http://127.0.0.1:3171/health'; + + setSnapshotRecovery(pathToHome, fileConfig, true); + setTreeRecoveryParallelPersistenceBuffer(pathToHome, fileConfig, 4); + } else { + ethRpcUrl = process.env.ETH_CLIENT_WEB3_URL ?? 
'http://127.0.0.1:8545'; + apiWeb3JsonRpcHttpUrl = 'http://127.0.0.1:3050'; + externalNodeUrl = 'http://127.0.0.1:3060'; + extNodeHealthUrl = 'http://127.0.0.1:3081/health'; + } + + mainNode = new zksync.Provider(apiWeb3JsonRpcHttpUrl); + externalNode = new zksync.Provider(externalNodeUrl); await NodeProcess.stopAll('KILL'); }); before('create test wallet', async () => { - const ethRpcUrl = process.env.ETH_CLIENT_WEB3_URL ?? 'http://127.0.0.1:8545'; console.log(`Using L1 RPC at ${ethRpcUrl}`); const eth = new ethers.JsonRpcProvider(ethRpcUrl); fundedWallet = await FundedWallet.create(mainNode, eth); @@ -110,6 +143,14 @@ describe('snapshot recovery', () => { await externalNodeProcess.stopAndWait('KILL'); await externalNodeProcess.logs.close(); } + + if (fileConfig.loadFromFile) { + setSnapshotRecovery(pathToHome, fileConfig, false); + setChunkSize(pathToHome, fileConfig, 10); + setDataRetentionSec(pathToHome, fileConfig, 3600); + setRemovalDelaySec(pathToHome, fileConfig, 60); + setTreeRecoveryParallelPersistenceBuffer(pathToHome, fileConfig, 1); + } }); async function getAllSnapshots() { @@ -128,7 +169,10 @@ describe('snapshot recovery', () => { } step('create snapshot', async () => { - await executeCommandWithLogs('zk run snapshots-creator', 'snapshot-creator.log'); + await executeCommandWithLogs( + fileConfig.loadFromFile ? 
`zk_supervisor snapshot create` : 'zk run snapshots-creator', + 'snapshot-creator.log' + ); }); step('validate snapshot', async () => { @@ -181,16 +225,17 @@ describe('snapshot recovery', () => { } }); - step('drop external node database', async () => { - await dropNodeDatabase(externalNodeEnv); - }); - - step('drop external node storage', async () => { - await dropNodeStorage(externalNodeEnv); + step('drop external node data', async () => { + await dropNodeData(fileConfig.loadFromFile, externalNodeEnv); }); step('initialize external node', async () => { - externalNodeProcess = await NodeProcess.spawn(externalNodeEnv, 'snapshot-recovery.log'); + externalNodeProcess = await NodeProcess.spawn( + externalNodeEnv, + 'snapshot-recovery.log', + pathToHome, + fileConfig.loadFromFile + ); let recoveryFinished = false; let consistencyCheckerSucceeded = false; @@ -198,7 +243,7 @@ describe('snapshot recovery', () => { while (!recoveryFinished || !consistencyCheckerSucceeded || !reorgDetectorSucceeded) { await sleep(1000); - const health = await getExternalNodeHealth(); + const health = await getExternalNodeHealth(extNodeHealthUrl); if (health === null) { continue; } @@ -299,15 +344,28 @@ describe('snapshot recovery', () => { EN_PRUNING_CHUNK_SIZE: '1' }; externalNodeEnv = { ...externalNodeEnv, ...pruningParams }; + + if (fileConfig.loadFromFile) { + setChunkSize(pathToHome, fileConfig, 1); + setDataRetentionSec(pathToHome, fileConfig, 0); + setRemovalDelaySec(pathToHome, fileConfig, 1); + } + console.log('Starting EN with pruning params', pruningParams); - externalNodeProcess = await NodeProcess.spawn(externalNodeEnv, externalNodeProcess.logs, components); + externalNodeProcess = await NodeProcess.spawn( + externalNodeEnv, + externalNodeProcess.logs, + pathToHome, + fileConfig.loadFromFile, + components + ); let isDbPrunerReady = false; let isTreePrunerReady = disableTreeDuringPruning; // skip health checks if we don't run the tree let isTreeFetcherReady = false; while 
(!isDbPrunerReady || !isTreePrunerReady || !isTreeFetcherReady) { await sleep(1000); - const health = await getExternalNodeHealth(); + const health = await getExternalNodeHealth(extNodeHealthUrl); if (health === null) { continue; } @@ -350,7 +408,7 @@ describe('snapshot recovery', () => { while (!isDbPruned || !isTreePruned) { await sleep(1000); - const health = (await getExternalNodeHealth())!; + const health = (await getExternalNodeHealth(extNodeHealthUrl))!; const dbPrunerHealth = health.components.db_pruner!; console.log('DB pruner health', dbPrunerHealth); diff --git a/core/tests/recovery-test/tests/utils.ts b/core/tests/recovery-test/tests/utils.ts new file mode 100644 index 000000000000..0f56884def83 --- /dev/null +++ b/core/tests/recovery-test/tests/utils.ts @@ -0,0 +1,47 @@ +import * as fs from 'fs'; +import { getConfigPath } from 'utils/build/file-configs'; + +export function setSnapshotRecovery(pathToHome: string, fileConfig: any, value: boolean) { + const generalConfigPath = getConfigPath({ + pathToHome, + chain: fileConfig.chain, + configsFolder: 'configs/external_node', + config: 'general.yaml' + }); + const generalConfig = fs.readFileSync(generalConfigPath, 'utf8'); + // NOTE weak approach. 
It assumes the enabled property to be the first within snapshot_recovery + const regex = /(\bsnapshot_recovery:\s*\n\s*enabled:\s*)\w+/; + const newGeneralConfig = generalConfig.replace(regex, `$1${value}`); + + fs.writeFileSync(generalConfigPath, newGeneralConfig, 'utf8'); +} + +export function setTreeRecoveryParallelPersistenceBuffer(pathToHome: string, fileConfig: any, value: number) { + setPropertyInGeneralConfig(pathToHome, fileConfig, 'tree_recovery_parallel_persistence_buffer', value); +} + +export function setChunkSize(pathToHome: string, fileConfig: any, value: number) { + setPropertyInGeneralConfig(pathToHome, fileConfig, 'chunk_size', value); +} + +export function setDataRetentionSec(pathToHome: string, fileConfig: any, value: number) { + setPropertyInGeneralConfig(pathToHome, fileConfig, 'data_retention_sec', value); +} + +export function setRemovalDelaySec(pathToHome: string, fileConfig: any, value: number) { + setPropertyInGeneralConfig(pathToHome, fileConfig, 'removal_delay_sec', value); +} + +function setPropertyInGeneralConfig(pathToHome: string, fileConfig: any, property: string, value: number) { + const generalConfigPath = getConfigPath({ + pathToHome, + chain: fileConfig.chain, + configsFolder: 'configs/external_node', + config: 'general.yaml' + }); + const generalConfig = fs.readFileSync(generalConfigPath, 'utf8'); + const regex = new RegExp(`${property}:\\s*\\d+`, 'g'); + const newGeneralConfig = generalConfig.replace(regex, `${property}: ${value}`); + + fs.writeFileSync(generalConfigPath, newGeneralConfig, 'utf8'); +} diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 2765f1a7319d..059b993d326b 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -63,7 +63,7 @@ api: estimate_gas_scale_factor: 1.2 estimate_gas_acceptable_overestimation: 1000 max_tx_size: 1000000 - api_namespaces: [eth,net,web3,zks,pubsub,debug] + api_namespaces: [en,eth,net,web3,zks,pubsub,debug] 
max_response_body_size_overrides: - method: eth_getTransactionReceipt # no size specified, meaning no size limit - method: zks_getProof @@ -153,8 +153,8 @@ snapshot_creator: file_backed: file_backed_base_path: artifacts max_retries: 10 - concurrent_queries_count: 1 - storage_logs_chunk_size: 2 + concurrent_queries_count: 25 + storage_logs_chunk_size: 1000000 prover: @@ -349,6 +349,10 @@ basic_witness_input_producer: snapshot_recovery: enabled: false + object_store: + max_retries: 100 + file_backed: + file_backed_base_path: artifacts postgres: max_concurrency: 10 tree: diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs index b2c6df6a4864..cc2b0a12b339 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs @@ -1,3 +1,4 @@ pub mod clean; pub mod database; +pub mod snapshot; pub mod test; diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/snapshot.rs b/zk_toolbox/crates/zk_supervisor/src/commands/snapshot.rs new file mode 100644 index 000000000000..aac9f5345d42 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/snapshot.rs @@ -0,0 +1,40 @@ +use anyhow::Context; +use clap::Subcommand; +use common::{cmd::Cmd, logger}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use crate::messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_RUNNING_SNAPSHOT_CREATOR}; + +#[derive(Subcommand, Debug)] +pub enum SnapshotCommands { + Create, +} + +pub(crate) async fn run(shell: &Shell, args: SnapshotCommands) -> anyhow::Result<()> { + match args { + SnapshotCommands::Create => { + create(shell).await?; + } + } + + Ok(()) +} + +async fn create(shell: &Shell) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_chain(Some(ecosystem.default_chain.clone())) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + + let config_path = chain.path_to_general_config(); + let secrets_path = 
chain.path_to_secrets_config(); + + logger::info(MSG_RUNNING_SNAPSHOT_CREATOR); + + let mut cmd = Cmd::new(cmd!(shell, "cargo run --bin snapshots_creator --release -- --config-path={config_path} --secrets-path={secrets_path}")) + .env("RUST_LOG", "snapshots_creator=debug"); + + cmd = cmd.with_force_run(); + cmd.run().context("MSG") +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/args/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/args/mod.rs index 6a00b2152bdd..fc6098488971 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/args/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/args/mod.rs @@ -1,2 +1,3 @@ pub mod integration; +pub mod recovery; pub mod revert; diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/args/recovery.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/args/recovery.rs new file mode 100644 index 000000000000..3bddc6bce1f1 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/args/recovery.rs @@ -0,0 +1,10 @@ +use clap::Parser; +use serde::{Deserialize, Serialize}; + +use crate::messages::MSG_TESTS_RECOVERY_SNAPSHOT_HELP; + +#[derive(Debug, Serialize, Deserialize, Parser)] +pub struct RecoveryArgs { + #[clap(short, long, help = MSG_TESTS_RECOVERY_SNAPSHOT_HELP)] + pub snapshot: bool, +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs index 857190dba3b0..b95f68d78931 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs @@ -1,11 +1,14 @@ -use args::{integration::IntegrationArgs, revert::RevertArgs}; +use args::{integration::IntegrationArgs, recovery::RecoveryArgs, revert::RevertArgs}; use clap::Subcommand; use xshell::Shell; -use crate::messages::{MSG_INTEGRATION_TESTS_ABOUT, MSG_REVERT_TEST_ABOUT}; +use crate::messages::{ + MSG_INTEGRATION_TESTS_ABOUT, MSG_RECOVERY_TEST_ABOUT, MSG_REVERT_TEST_ABOUT, 
+}; mod args; mod integration; +mod recovery; mod revert; #[derive(Subcommand, Debug)] @@ -14,11 +17,14 @@ pub enum TestCommands { Integration(IntegrationArgs), #[clap(about = MSG_REVERT_TEST_ABOUT, alias = "r")] Revert(RevertArgs), + #[clap(about = MSG_RECOVERY_TEST_ABOUT, alias = "rec")] + Recovery(RecoveryArgs), } pub fn run(shell: &Shell, args: TestCommands) -> anyhow::Result<()> { match args { TestCommands::Integration(args) => integration::run(shell, args), TestCommands::Revert(args) => revert::run(shell, args), + TestCommands::Recovery(args) => recovery::run(shell, args), } } diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs new file mode 100644 index 000000000000..fdde6a61f896 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs @@ -0,0 +1,52 @@ +use common::{cmd::Cmd, logger, server::Server, spinner::Spinner}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use super::args::recovery::RecoveryArgs; +use crate::messages::{MSG_RECOVERY_TEST_RUN_INFO, MSG_RECOVERY_TEST_RUN_SUCCESS}; + +const RECOVERY_TESTS_PATH: &str = "core/tests/recovery-test"; + +pub fn run(shell: &Shell, args: RecoveryArgs) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + shell.change_dir(ecosystem_config.link_to_code.join(RECOVERY_TESTS_PATH)); + + logger::info(MSG_RECOVERY_TEST_RUN_INFO); + Server::new(None, ecosystem_config.link_to_code.clone()).build(shell)?; + install_and_build_dependencies(shell, &ecosystem_config)?; + run_test(shell, &args, &ecosystem_config)?; + logger::outro(MSG_RECOVERY_TEST_RUN_SUCCESS); + + Ok(()) +} + +fn install_and_build_dependencies( + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(&ecosystem_config.link_to_code); + let spinner = Spinner::new("Installing and building dependencies..."); + Cmd::new(cmd!(shell, "yarn 
install")).run()?; + Cmd::new(cmd!(shell, "yarn utils build")).run()?; + spinner.finish(); + Ok(()) +} + +fn run_test( + shell: &Shell, + args: &RecoveryArgs, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + Spinner::new("Running test...").freeze(); + + let cmd = if args.snapshot { + cmd!(shell, "yarn mocha tests/snapshot-recovery.test.ts") + } else { + cmd!(shell, "yarn mocha tests/genesis-recovery.test.ts") + }; + + let cmd = Cmd::new(cmd).env("CHAIN_NAME", &ecosystem_config.default_chain); + cmd.with_force_run().run()?; + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/main.rs b/zk_toolbox/crates/zk_supervisor/src/main.rs index d6cc82c0994d..8b2b469390aa 100644 --- a/zk_toolbox/crates/zk_supervisor/src/main.rs +++ b/zk_toolbox/crates/zk_supervisor/src/main.rs @@ -1,5 +1,5 @@ use clap::{Parser, Subcommand}; -use commands::{database::DatabaseCommands, test::TestCommands}; +use commands::{database::DatabaseCommands, snapshot::SnapshotCommands, test::TestCommands}; use common::{ check_general_prerequisites, config::{global_config, init_global_config, GlobalConfig}, @@ -36,6 +36,8 @@ enum SupervisorSubcommands { Test(TestCommands), #[command(subcommand, about = MSG_SUBCOMMAND_CLEAN)] Clean(CleanCommands), + #[command(subcommand, about = "Snapshots creator")] + Snapshot(SnapshotCommands), } #[derive(Parser, Debug)] @@ -86,6 +88,7 @@ async fn run_subcommand(args: Supervisor, shell: &Shell) -> anyhow::Result<()> { SupervisorSubcommands::Database(command) => commands::database::run(shell, command).await?, SupervisorSubcommands::Test(command) => commands::test::run(shell, command)?, SupervisorSubcommands::Clean(command) => commands::clean::run(shell, command)?, + SupervisorSubcommands::Snapshot(command) => commands::snapshot::run(shell, command).await?, } Ok(()) } diff --git a/zk_toolbox/crates/zk_supervisor/src/messages.rs b/zk_toolbox/crates/zk_supervisor/src/messages.rs index ecbe604a7ba3..222d096f6ec1 100644 --- 
a/zk_toolbox/crates/zk_supervisor/src/messages.rs +++ b/zk_toolbox/crates/zk_supervisor/src/messages.rs @@ -72,7 +72,10 @@ pub(super) const MSG_DATABASE_NEW_MIGRATION_SUCCESS: &str = "Migration created s // Tests related messages pub(super) const MSG_INTEGRATION_TESTS_ABOUT: &str = "Run integration tests"; pub(super) const MSG_REVERT_TEST_ABOUT: &str = "Run revert tests"; +pub(super) const MSG_RECOVERY_TEST_ABOUT: &str = "Run recovery tests"; pub(super) const MSG_TESTS_EXTERNAL_NODE_HELP: &str = "Run tests for external node"; +pub(super) const MSG_TESTS_RECOVERY_SNAPSHOT_HELP: &str = + "Run recovery from a snapshot instead of genesis"; // Integration tests related messages pub(super) fn msg_integration_tests_run(external_node: bool) -> String { @@ -106,6 +109,10 @@ pub(super) fn msg_revert_tests_run(external_node: bool) -> String { pub(super) const MSG_REVERT_TEST_RUN_SUCCESS: &str = "Revert and restart test ran successfully"; +// Recovery tests related messages +pub(super) const MSG_RECOVERY_TEST_RUN_INFO: &str = "Running recovery test"; +pub(super) const MSG_RECOVERY_TEST_RUN_SUCCESS: &str = "Recovery test ran successfully"; + // Cleaning related messages pub(super) const MSG_DOCKER_COMPOSE_DOWN: &str = "docker compose down"; pub(super) const MSG_DOCKER_COMPOSE_REMOVE_VOLUMES: &str = "docker compose remove volumes"; @@ -114,3 +121,6 @@ pub(super) const MSG_CONTRACTS_CLEANING: &str = "Removing contracts building and deployment artifacts"; pub(super) const MSG_CONTRACTS_CLEANING_FINISHED: &str = "Contracts building and deployment artifacts are cleaned up"; + +/// Snapshot creator related messages +pub(super) const MSG_RUNNING_SNAPSHOT_CREATOR: &str = "Running snapshot creator"; From a629ba1bb138dd0a2b1ae5e478ee9635634a5087 Mon Sep 17 00:00:00 2001 From: Grzegorz Prusak Date: Fri, 26 Jul 2024 17:09:38 +0200 Subject: [PATCH 50/52] chore: bumped sqlx version (#2488) --- Cargo.lock | 469 +++++++++--------- Cargo.toml | 4 +- core/node/da_dispatcher/src/da_dispatcher.rs 
| 5 +- docker/build-base/Dockerfile | 2 +- docker/local-node/Dockerfile | 2 +- .../20.04_amd64_cuda_11_8.Dockerfile | 2 +- .../20.04_amd64_cuda_12_0.Dockerfile | 2 +- docker/zk-environment/Dockerfile | 2 +- docs/guides/setup-dev.md | 4 +- prover/Cargo.lock | 90 ++-- prover/Cargo.toml | 2 +- zk_toolbox/Cargo.lock | 113 +++-- zk_toolbox/Cargo.toml | 2 +- 13 files changed, 354 insertions(+), 345 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0503253b526c..e53574cd02ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -81,7 +81,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" dependencies = [ "cfg-if 1.0.0", - "getrandom", "once_cell", "version_check", "zerocopy", @@ -203,7 +202,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" dependencies = [ "proc-macro-hack", - "quote 1.0.33", + "quote 1.0.36", "syn 1.0.109", ] @@ -257,9 +256,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -268,9 +267,9 @@ version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -288,16 +287,6 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" -[[package]] -name = "atomic-write-file" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"edcdbedc2236483ab103a53415653d6b4442ea6141baf1ffa85df29635e88436" -dependencies = [ - "nix", - "rand 0.8.5", -] - [[package]] name = "atty" version = "0.2.14" @@ -504,10 +493,12 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.3.1" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" dependencies = [ + "autocfg", + "libm", "num-bigint 0.4.6", "num-integer", "num-traits", @@ -535,12 +526,12 @@ dependencies = [ "lazycell", "peeking_take_while", "prettyplease", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "regex", "rustc-hash", "shlex", - "syn 2.0.38", + "syn 2.0.72", ] [[package]] @@ -557,12 +548,12 @@ dependencies = [ "lazycell", "log", "prettyplease", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "regex", "rustc-hash", "shlex", - "syn 2.0.38", + "syn 2.0.72", "which", ] @@ -778,9 +769,9 @@ checksum = "bf4918709cc4dd777ad2b6303ed03cb37f3ca0ccede8c1b0d28ac6db8f4710e0" dependencies = [ "once_cell", "proc-macro-crate 2.0.0", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", "syn_derive", ] @@ -819,8 +810,8 @@ version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -965,9 +956,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ 
"android-tzdata", "iana-time-zone", @@ -975,7 +966,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -1199,9 +1190,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -1259,6 +1250,15 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bed69047ed42e52c7e38d6421eeb8ceefb4f2a2b52eed59137f7bad7908f6800" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils 0.8.20", +] + [[package]] name = "console" version = "0.15.7" @@ -1292,8 +1292,8 @@ version = "0.2.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "unicode-xid 0.2.4", ] @@ -1583,8 +1583,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "faa0b8f9fdb5c91dcd5569cc7cbc11f514fd784a34988ead8455db0db2cfc1c7" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -1629,9 +1629,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -1652,8 +1652,8 @@ checksum = 
"859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "strsim 0.10.0", "syn 1.0.109", ] @@ -1665,7 +1665,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core", - "quote 1.0.33", + "quote 1.0.36", "syn 1.0.109", ] @@ -1676,7 +1676,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if 1.0.0", - "hashbrown 0.14.2", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", @@ -1729,8 +1729,8 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -1749,9 +1749,9 @@ version = "1.0.0-beta.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bba3e9872d7c58ce7ef0fcf1844fcc3e23ef2a58377b50df35dd98e42a5726e" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", "unicode-xid 0.2.4", ] @@ -1959,12 +1959,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -2033,9 +2033,14 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.3" +version = "5.3.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] [[package]] name = "fastrand" @@ -2085,8 +2090,8 @@ dependencies = [ "num-bigint 0.4.6", "num-integer", "num-traits", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "serde", "syn 1.0.109", ] @@ -2321,9 +2326,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -2666,9 +2671,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.7", "allocator-api2", @@ -2676,11 +2681,11 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.5", ] [[package]] @@ -2697,9 +2702,6 @@ name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] [[package]] name = "heck" @@ -3051,8 +3053,8 @@ version = "0.2.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -3073,7 +3075,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.5", ] [[package]] @@ -3290,9 +3292,9 @@ checksum = "7895f186d5921065d96e16bd795e5ca89ac8356ec423fafc6e3d7cf8ec11aee4" dependencies = [ "heck 0.5.0", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -3433,9 +3435,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.149" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libloading" @@ -3471,9 +3473,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" dependencies = [ "cc", "pkg-config", @@ -3512,16 +3514,16 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba125974b109d512fccbc6c0244e7580143e460895dfd6ea7f8bbb692fd94396" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "loadnext" @@ -3588,10 +3590,10 @@ checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" dependencies = [ "beef", "fnv", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "regex-syntax 0.6.29", - "syn 2.0.38", + "syn 2.0.72", ] [[package]] @@ -3713,9 +3715,9 @@ version = "5.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4022,9 +4024,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4034,9 +4036,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ "proc-macro-crate 3.1.0", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4087,9 +4089,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4274,11 +4276,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + [[package]] name = "parking_lot" version = "0.12.1" @@ -4297,7 +4305,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.4.1", + "redox_syscall", "smallvec", "windows-targets 0.48.5", ] @@ -4368,9 +4376,9 @@ checksum = "2a31940305ffc96863a735bef7c7994a00b325a7138fdbc5bda0f1a0476d3275" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4409,9 +4417,9 @@ version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4542,8 +4550,8 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ - "proc-macro2 1.0.69", - "syn 2.0.38", + "proc-macro2 1.0.86", + "syn 2.0.72", ] [[package]] @@ -4603,8 +4611,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", "version_check", ] @@ -4615,8 +4623,8 @@ version = "1.0.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "version_check", ] @@ -4637,9 +4645,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -4662,9 +4670,9 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4704,7 +4712,7 @@ dependencies = [ "prost 0.12.1", "prost-types", "regex", - "syn 2.0.38", + "syn 2.0.72", "tempfile", "which", ] @@ -4717,9 +4725,9 @@ checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" dependencies = [ "anyhow", "itertools 0.11.0", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4730,9 +4738,9 @@ checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" dependencies = [ "anyhow", "itertools 0.12.0", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -4802,8 +4810,8 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -4860,11 +4868,11 @@ dependencies = [ [[package]] 
name = "quote" -version = "1.0.33" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ - "proc-macro2 1.0.69", + "proc-macro2 1.0.86", ] [[package]] @@ -4978,15 +4986,6 @@ dependencies = [ "rand_core 0.3.1", ] -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.4.1" @@ -5229,8 +5228,8 @@ version = "0.7.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5c462a1328c8e67e4d6dbad1eb0355dd43e8ab432c6e227a43657f16ade5033" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -5325,15 +5324,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.20" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ce50cb2e16c2903e30d1cbccfd8387a74b9d4c938b6a4c5ec6cc7556f7a8a0" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -5695,9 +5694,9 @@ version = "1.0.189" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -5752,8 +5751,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ 
"darling", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -6069,9 +6068,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dba03c279da73694ef99763320dea58b51095dfe87d001b1d4b5fe78ba8763cf" +checksum = "27144619c6e5802f1380337a209d2ac1c431002dd74c6e60aebff3c506dc4f0c" dependencies = [ "sqlx-core", "sqlx-macros", @@ -6082,11 +6081,10 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd" +checksum = "a999083c1af5b5d6c071d34a708a19ba3e02106ad82ef7bbd69f5e48266b613b" dependencies = [ - "ahash 0.8.7", "atoi", "bigdecimal", "byteorder", @@ -6094,7 +6092,6 @@ dependencies = [ "chrono", "crc", "crossbeam-queue 0.3.11", - "dotenvy", "either", "event-listener", "futures-channel", @@ -6102,6 +6099,7 @@ dependencies = [ "futures-intrusive", "futures-io", "futures-util", + "hashbrown 0.14.5", "hashlink", "hex", "indexmap 2.1.0", @@ -6127,31 +6125,30 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89961c00dc4d7dffb7aee214964b065072bff69e36ddb9e2c107541f75e4f2a5" +checksum = "a23217eb7d86c584b8cbe0337b9eacf12ab76fe7673c513141ec42565698bb88" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "sqlx-core", "sqlx-macros-core", - "syn 1.0.109", + "syn 2.0.72", ] [[package]] name = "sqlx-macros-core" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0bd4519486723648186a08785143599760f7cc81c52334a55d6a83ea1e20841" +checksum = "1a099220ae541c5db479c6424bdf1b200987934033c2584f79a0e1693601e776" dependencies = [ - 
"atomic-write-file", "dotenvy", "either", - "heck 0.4.1", + "heck 0.5.0", "hex", "once_cell", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "serde", "serde_json", "sha2 0.10.8", @@ -6159,7 +6156,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.109", + "syn 2.0.72", "tempfile", "tokio", "url", @@ -6167,12 +6164,12 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4" +checksum = "5afe4c38a9b417b6a9a5eeffe7235d0a106716495536e7727d1c7f4b1ff3eba6" dependencies = [ "atoi", - "base64 0.21.5", + "base64 0.22.1", "bigdecimal", "bitflags 2.6.0", "byteorder", @@ -6212,12 +6209,12 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24" +checksum = "b1dbb157e65f10dbe01f729339c06d239120221c9ad9fa0ba8408c4cc18ecf21" dependencies = [ "atoi", - "base64 0.21.5", + "base64 0.22.1", "bigdecimal", "bitflags 2.6.0", "byteorder", @@ -6244,7 +6241,6 @@ dependencies = [ "rust_decimal", "serde", "serde_json", - "sha1", "sha2 0.10.8", "smallvec", "sqlx-core", @@ -6256,9 +6252,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "210976b7d948c7ba9fced8ca835b11cbb2d677c59c79de41ac0d397e14547490" +checksum = "9b2cdd83c008a622d94499c0006d8ee5f821f36c89b7d625c900e5dc30b5c5ee" dependencies = [ "atoi", "chrono", @@ -6272,10 +6268,10 @@ dependencies = [ "log", "percent-encoding", "serde", + "serde_urlencoded", "sqlx-core", "tracing", "url", - "urlencoding", ] [[package]] @@ -6332,8 +6328,8 @@ checksum = 
"dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -6353,10 +6349,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "rustversion", - "syn 2.0.38", + "syn 2.0.72", ] [[package]] @@ -6382,19 +6378,19 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.38" +version = "2.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", + "proc-macro2 1.0.86", + "quote 1.0.36", "unicode-ident", ] @@ -6405,9 +6401,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -6472,15 +6468,14 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.8.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = 
"85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if 1.0.0", "fastrand", - "redox_syscall 0.3.5", "rustix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -6507,9 +6502,9 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cfbe7811249c4c914b06141b8ac0f2cee2733fb883d05eb19668a45fc60c3d5" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -6528,9 +6523,9 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8f546451eaa38373f549093fe9fd05e7d2bade739e2ddf834b9968621d60107" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -6563,9 +6558,9 @@ version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -6718,9 +6713,9 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -6907,9 +6902,9 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -7130,7 +7125,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5ad948c1cb799b1a70f836077721a92a35ac177d4daddf4c20a633786d4cf618" dependencies = [ - "quote 1.0.33", + "quote 1.0.36", "syn 1.0.109", ] @@ -7262,9 +7257,9 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8bb19c33cd5f04dcf4e767635e058a998edbc2b7fca32ade0a4a1cea0f8e9b34" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -7335,9 +7330,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", "wasm-bindgen-shared", ] @@ -7359,7 +7354,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.33", + "quote 1.0.36", "wasm-bindgen-macro-support", ] @@ -7369,9 +7364,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -7442,7 +7437,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fec781d48b41f8163426ed18e8fc2864c12937df9ce54c88ede7bd47270893e" dependencies = [ - "redox_syscall 0.4.1", + "redox_syscall", "wasite", ] @@ -7759,9 +7754,9 @@ version = "0.7.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -7779,9 +7774,9 @@ version = "1.4.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -9105,9 +9100,9 @@ dependencies = [ name = "zksync_node_framework_derive" version = "0.1.0" dependencies = [ - "proc-macro2 1.0.69", - "quote 1.0.33", - "syn 2.0.38", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] @@ -9280,12 +9275,12 @@ dependencies = [ "anyhow", "heck 0.5.0", "prettyplease", - "proc-macro2 1.0.69", + "proc-macro2 1.0.86", "prost-build", "prost-reflect", "protox", - "quote 1.0.33", - "syn 2.0.38", + "quote 1.0.36", + "syn 2.0.72", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e731bf6533b0..8c94e29c4630 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -107,7 +107,7 @@ assert_matches = "1.5" async-trait = "0.1" axum = "0.7.5" backon = "0.4.4" -bigdecimal = "0.3.0" +bigdecimal = "0.4.5" bincode = "1" blake2 = "0.10" chrono = "0.4" @@ -164,7 +164,7 @@ serde_with = "1" serde_yaml = "0.9" sha2 = "0.10.8" sha3 = "0.10.8" -sqlx = "0.7.3" +sqlx = "0.8.0" static_assertions = "1.1" structopt = "0.3.20" strum = "0.26" diff --git a/core/node/da_dispatcher/src/da_dispatcher.rs b/core/node/da_dispatcher/src/da_dispatcher.rs index 80c030dff338..99e87a1cc45d 100644 --- a/core/node/da_dispatcher/src/da_dispatcher.rs +++ b/core/node/da_dispatcher/src/da_dispatcher.rs @@ -1,7 +1,7 @@ use std::{future::Future, time::Duration}; use anyhow::Context; -use chrono::{NaiveDateTime, Utc}; +use chrono::Utc; use rand::Rng; use tokio::sync::watch::Receiver; use zksync_config::DADispatcherConfig; @@ -94,8 +94,7 @@ impl DataAvailabilityDispatcher { })?; let dispatch_latency_duration = dispatch_latency.observe(); - let sent_at = - NaiveDateTime::from_timestamp_millis(Utc::now().timestamp_millis()).unwrap(); + let sent_at = Utc::now().naive_utc(); let mut conn = 
self.pool.connection_tagged("da_dispatcher").await?; conn.data_availability_dal() diff --git a/docker/build-base/Dockerfile b/docker/build-base/Dockerfile index 436843eed3de..68ea7ce001c7 100644 --- a/docker/build-base/Dockerfile +++ b/docker/build-base/Dockerfile @@ -12,4 +12,4 @@ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ rustup install nightly-2024-05-07 && \ rustup default nightly-2024-05-07 -RUN cargo install sqlx-cli --version 0.7.3 +RUN cargo install sqlx-cli --version 0.8.0 diff --git a/docker/local-node/Dockerfile b/docker/local-node/Dockerfile index 2e6b09ef3d10..fce2cd9a8218 100644 --- a/docker/local-node/Dockerfile +++ b/docker/local-node/Dockerfile @@ -24,7 +24,7 @@ ENV RUSTUP_HOME=/usr/local/rustup \ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ rustup install stable -RUN cargo install sqlx-cli --version 0.7.3 +RUN cargo install sqlx-cli --version 0.8.0 # Copy required packages while preserving the folders structure from the repo # It's required because these packages use relative paths to the SDK diff --git a/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile b/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile index a50587e9a83b..44725f4f59dc 100644 --- a/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile +++ b/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile @@ -75,7 +75,7 @@ RUN echo "deb http://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/so RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y RUN rustup install nightly-2024-05-07 RUN rustup default stable -RUN cargo install --version=0.7.3 sqlx-cli +RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest # Copy compiler (both solc and zksolc) binaries diff --git a/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile b/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile index 9e56613f9ead..d4997b589bc8 100644 --- a/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile +++ 
b/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile @@ -73,7 +73,7 @@ RUN echo "deb http://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/so RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y RUN rustup install nightly-2024-05-07 RUN rustup default stable -RUN cargo install --version=0.7.3 sqlx-cli +RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest # Copy compiler (both solc and zksolc) binaries diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile index c5cb35cf1a07..0494067a4ecd 100644 --- a/docker/zk-environment/Dockerfile +++ b/docker/zk-environment/Dockerfile @@ -103,7 +103,7 @@ RUN echo "deb [arch=${ARCH}] http://packages.cloud.google.com/apt cloud-sdk main RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y && \ rustup default stable -RUN cargo install --version=0.7.3 sqlx-cli +RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest # Copy compiler (both solc and zksolc) binaries diff --git a/docs/guides/setup-dev.md b/docs/guides/setup-dev.md index 12e8da7b022f..f656eab0fdc6 100644 --- a/docs/guides/setup-dev.md +++ b/docs/guides/setup-dev.md @@ -43,7 +43,7 @@ yarn set version 1.22.19 # For running unit tests cargo install cargo-nextest # SQL tools -cargo install sqlx-cli --version 0.7.4 +cargo install sqlx-cli --version 0.8.0 # Foundry curl -L https://foundry.paradigm.xyz | bash @@ -217,7 +217,7 @@ SQLx is a Rust library we use to interact with Postgres, and its CLI is used to features of the library. 
```bash -cargo install --locked sqlx-cli --version 0.7.4 +cargo install --locked sqlx-cli --version 0.8.0 ``` ## Easier method using `nix` diff --git a/prover/Cargo.lock b/prover/Cargo.lock index d3023bba6dc9..cad7e13c7c69 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -46,7 +46,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if 1.0.0", - "getrandom", "once_cell", "version_check", "zerocopy", @@ -455,10 +454,12 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.3.1" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" dependencies = [ + "autocfg", + "libm", "num-bigint 0.4.5", "num-integer", "num-traits", @@ -1179,6 +1180,15 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bed69047ed42e52c7e38d6421eeb8ceefb4f2a2b52eed59137f7bad7908f6800" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils 0.8.20", +] + [[package]] name = "console" version = "0.15.8" @@ -1958,9 +1968,14 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.3" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] [[package]] name = "fastrand" @@ -2527,9 +2542,9 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] @@ -2543,15 +2558,6 @@ dependencies = [ "unicode-segmentation", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "heck" version = "0.5.0" @@ -3291,9 +3297,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" dependencies = [ "cc", "pkg-config", @@ -4055,6 +4061,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + [[package]] name = "parking_lot" version = "0.12.3" @@ -5770,9 +5782,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" +checksum = "27144619c6e5802f1380337a209d2ac1c431002dd74c6e60aebff3c506dc4f0c" dependencies = [ "sqlx-core", "sqlx-macros", @@ -5783,11 +5795,10 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" +checksum = "a999083c1af5b5d6c071d34a708a19ba3e02106ad82ef7bbd69f5e48266b613b" 
dependencies = [ - "ahash 0.8.11", "atoi", "bigdecimal", "byteorder", @@ -5802,6 +5813,7 @@ dependencies = [ "futures-intrusive", "futures-io", "futures-util", + "hashbrown 0.14.5", "hashlink", "hex", "indexmap 2.2.6", @@ -5827,26 +5839,26 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" +checksum = "a23217eb7d86c584b8cbe0337b9eacf12ab76fe7673c513141ec42565698bb88" dependencies = [ "proc-macro2 1.0.85", "quote 1.0.36", "sqlx-core", "sqlx-macros-core", - "syn 1.0.109", + "syn 2.0.66", ] [[package]] name = "sqlx-macros-core" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" +checksum = "1a099220ae541c5db479c6424bdf1b200987934033c2584f79a0e1693601e776" dependencies = [ "dotenvy", "either", - "heck 0.4.1", + "heck 0.5.0", "hex", "once_cell", "proc-macro2 1.0.85", @@ -5858,7 +5870,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.109", + "syn 2.0.66", "tempfile", "tokio", "url", @@ -5866,12 +5878,12 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" +checksum = "5afe4c38a9b417b6a9a5eeffe7235d0a106716495536e7727d1c7f4b1ff3eba6" dependencies = [ "atoi", - "base64 0.21.7", + "base64 0.22.1", "bigdecimal", "bitflags 2.5.0", "byteorder", @@ -5911,12 +5923,12 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" +checksum = 
"b1dbb157e65f10dbe01f729339c06d239120221c9ad9fa0ba8408c4cc18ecf21" dependencies = [ "atoi", - "base64 0.21.7", + "base64 0.22.1", "bigdecimal", "bitflags 2.5.0", "byteorder", @@ -5954,9 +5966,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" +checksum = "9b2cdd83c008a622d94499c0006d8ee5f821f36c89b7d625c900e5dc30b5c5ee" dependencies = [ "atoi", "chrono", @@ -5970,10 +5982,10 @@ dependencies = [ "log", "percent-encoding", "serde", + "serde_urlencoded", "sqlx-core", "tracing", "url", - "urlencoding", ] [[package]] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 824ce4c4451a..6c7919e20546 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -45,7 +45,7 @@ serde = "1.0" serde_derive = "1.0" serde_json = "1.0" sha3 = "0.10.8" -sqlx = { version = "0.7.3", default-features = false } +sqlx = { version = "0.8.0", default-features = false } structopt = "0.3.26" strum = { version = "0.26" } tempfile = "3" diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index 0c6b1b0a0cbb..c81d037a7d24 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -45,7 +45,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom", "once_cell", "version_check", "zerocopy", @@ -339,10 +338,12 @@ checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" [[package]] name = "bigdecimal" -version = "0.3.1" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" dependencies = [ + "autocfg", + "libm", "num-bigint", "num-integer", "num-traits", @@ -569,7 
+570,7 @@ version = "4.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "syn 2.0.68", @@ -681,6 +682,15 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bed69047ed42e52c7e38d6421eeb8ceefb4f2a2b52eed59137f7bad7908f6800" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "config" version = "0.1.0" @@ -1545,9 +1555,14 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.3" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] [[package]] name = "eyre" @@ -1944,22 +1959,13 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "heck" version = "0.5.0" @@ -2547,9 +2553,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.27.0" +version = "0.28.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" dependencies = [ "cc", "pkg-config", @@ -3166,6 +3172,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + [[package]] name = "parking_lot" version = "0.12.3" @@ -3540,7 +3552,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes", - "heck 0.5.0", + "heck", "itertools 0.12.1", "log", "multimap", @@ -4611,6 +4623,9 @@ name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +dependencies = [ + "serde", +] [[package]] name = "smawk" @@ -4689,9 +4704,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" +checksum = "27144619c6e5802f1380337a209d2ac1c431002dd74c6e60aebff3c506dc4f0c" dependencies = [ "sqlx-core", "sqlx-macros", @@ -4702,11 +4717,10 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" +checksum = "a999083c1af5b5d6c071d34a708a19ba3e02106ad82ef7bbd69f5e48266b613b" dependencies = [ - "ahash", "atoi", "byteorder", "bytes", @@ -4719,6 +4733,7 @@ dependencies = [ "futures-intrusive", "futures-io", "futures-util", + "hashbrown 0.14.5", "hashlink", "hex", "indexmap 2.2.6", @@ -4741,26 
+4756,26 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" +checksum = "a23217eb7d86c584b8cbe0337b9eacf12ab76fe7673c513141ec42565698bb88" dependencies = [ "proc-macro2", "quote", "sqlx-core", "sqlx-macros-core", - "syn 1.0.109", + "syn 2.0.68", ] [[package]] name = "sqlx-macros-core" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" +checksum = "1a099220ae541c5db479c6424bdf1b200987934033c2584f79a0e1693601e776" dependencies = [ "dotenvy", "either", - "heck 0.4.1", + "heck", "hex", "once_cell", "proc-macro2", @@ -4772,7 +4787,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.109", + "syn 2.0.68", "tempfile", "tokio", "url", @@ -4780,12 +4795,12 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" +checksum = "5afe4c38a9b417b6a9a5eeffe7235d0a106716495536e7727d1c7f4b1ff3eba6" dependencies = [ "atoi", - "base64 0.21.7", + "base64 0.22.1", "bitflags 2.6.0", "byteorder", "bytes", @@ -4822,12 +4837,12 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" +checksum = "b1dbb157e65f10dbe01f729339c06d239120221c9ad9fa0ba8408c4cc18ecf21" dependencies = [ "atoi", - "base64 0.21.7", + "base64 0.22.1", "bitflags 2.6.0", "byteorder", "crc", @@ -4860,9 +4875,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.4" +version = "0.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" +checksum = "9b2cdd83c008a622d94499c0006d8ee5f821f36c89b7d625c900e5dc30b5c5ee" dependencies = [ "atoi", "flume", @@ -4875,10 +4890,10 @@ dependencies = [ "log", "percent-encoding", "serde", + "serde_urlencoded", "sqlx-core", "tracing", "url", - "urlencoding", ] [[package]] @@ -4944,7 +4959,7 @@ version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "rustversion", @@ -5593,12 +5608,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" -[[package]] -name = "unicode-segmentation" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" - [[package]] name = "unicode-width" version = "0.1.13" @@ -5660,12 +5669,6 @@ dependencies = [ "serde", ] -[[package]] -name = "urlencoding" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" - [[package]] name = "utf-8" version = "0.7.6" @@ -6432,7 +6435,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6cafeec1150ae91f1a37c8f0dce6b71b92b93e0c4153d32b4c37e2fd71bce2f" dependencies = [ "anyhow", - "heck 0.5.0", + "heck", "prettyplease", "proc-macro2", "prost-build", diff --git a/zk_toolbox/Cargo.toml b/zk_toolbox/Cargo.toml index 0c5e2188c66a..ef47b9f7015a 100644 --- a/zk_toolbox/Cargo.toml +++ b/zk_toolbox/Cargo.toml @@ -47,7 +47,7 @@ rand = "0.8.5" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" serde_yaml = "0.9" -sqlx = { version = "0.7.4", features 
= ["runtime-tokio", "migrate", "postgres"] } +sqlx = { version = "0.8.0", features = ["runtime-tokio", "migrate", "postgres"] } strum = { version = "0.26", features = ["derive"] } thiserror = "1.0.57" tokio = { version = "1.37", features = ["full"] } From e2fa86fd216b04c798939f80517d7cca1a45a5a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mat=C3=ADas=20Ignacio=20Gonz=C3=A1lez?= Date: Fri, 26 Jul 2024 17:18:10 +0200 Subject: [PATCH 51/52] feat(zk_toolbox): Add update command (#2440) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add update command --------- Signed-off-by: Danil Co-authored-by: Danil --- zk_toolbox/crates/common/src/git.rs | 9 + zk_toolbox/crates/config/src/chain.rs | 17 +- zk_toolbox/crates/config/src/consts.rs | 12 +- zk_toolbox/crates/config/src/lib.rs | 2 +- .../zk_inception/src/commands/args/mod.rs | 2 + .../zk_inception/src/commands/args/update.rs | 9 + .../crates/zk_inception/src/commands/mod.rs | 1 + .../zk_inception/src/commands/update.rs | 495 ++++++++++++++++++ zk_toolbox/crates/zk_inception/src/main.rs | 6 +- .../crates/zk_inception/src/messages.rs | 41 ++ 10 files changed, 585 insertions(+), 9 deletions(-) create mode 100644 zk_toolbox/crates/zk_inception/src/commands/args/update.rs create mode 100644 zk_toolbox/crates/zk_inception/src/commands/update.rs diff --git a/zk_toolbox/crates/common/src/git.rs b/zk_toolbox/crates/common/src/git.rs index 7ebedf0f6283..ea6540c20b29 100644 --- a/zk_toolbox/crates/common/src/git.rs +++ b/zk_toolbox/crates/common/src/git.rs @@ -29,3 +29,12 @@ pub fn submodule_update(shell: &Shell, link_to_code: PathBuf) -> anyhow::Result< .run()?; Ok(()) } + +pub fn pull(shell: &Shell, link_to_code: PathBuf) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code); + let res = Cmd::new(cmd!(shell, "git rev-parse --abbrev-ref HEAD")).run_with_output()?; + let current_branch = String::from_utf8(res.stdout)?; + let current_branch = 
current_branch.trim_end(); + Cmd::new(cmd!(shell, "git pull origin {current_branch}")).run()?; + Ok(()) +} diff --git a/zk_toolbox/crates/config/src/chain.rs b/zk_toolbox/crates/config/src/chain.rs index e8b6df00644d..d8cc53954352 100644 --- a/zk_toolbox/crates/config/src/chain.rs +++ b/zk_toolbox/crates/config/src/chain.rs @@ -9,7 +9,10 @@ use xshell::Shell; use zksync_basic_types::L2ChainId; use crate::{ - consts::{CONFIG_NAME, GENERAL_FILE, L1_CONTRACTS_FOUNDRY, SECRETS_FILE, WALLETS_FILE}, + consts::{ + CONFIG_NAME, CONTRACTS_FILE, EN_CONFIG_FILE, GENERAL_FILE, GENESIS_FILE, + L1_CONTRACTS_FOUNDRY, SECRETS_FILE, WALLETS_FILE, + }, create_localhost_wallets, traits::{ FileConfigWithDefaultName, ReadConfig, ReadConfigWithBasePath, SaveConfig, @@ -101,6 +104,18 @@ impl ChainConfig { self.configs.join(GENERAL_FILE) } + pub fn path_to_external_node_config(&self) -> PathBuf { + self.configs.join(EN_CONFIG_FILE) + } + + pub fn path_to_genesis_config(&self) -> PathBuf { + self.configs.join(GENESIS_FILE) + } + + pub fn path_to_contracts_config(&self) -> PathBuf { + self.configs.join(CONTRACTS_FILE) + } + pub fn path_to_secrets_config(&self) -> PathBuf { self.configs.join(SECRETS_FILE) } diff --git a/zk_toolbox/crates/config/src/consts.rs b/zk_toolbox/crates/config/src/consts.rs index fecb6e78c9a5..0fd55ebe0db6 100644 --- a/zk_toolbox/crates/config/src/consts.rs +++ b/zk_toolbox/crates/config/src/consts.rs @@ -1,23 +1,23 @@ /// Name of the main configuration file pub(crate) const CONFIG_NAME: &str = "ZkStack.yaml"; /// Name of the wallets file -pub(crate) const WALLETS_FILE: &str = "wallets.yaml"; +pub const WALLETS_FILE: &str = "wallets.yaml"; /// Name of the secrets config file -pub(crate) const SECRETS_FILE: &str = "secrets.yaml"; +pub const SECRETS_FILE: &str = "secrets.yaml"; /// Name of the general config file -pub(crate) const GENERAL_FILE: &str = "general.yaml"; +pub const GENERAL_FILE: &str = "general.yaml"; /// Name of the genesis config file -pub(crate) const 
GENESIS_FILE: &str = "genesis.yaml"; +pub const GENESIS_FILE: &str = "genesis.yaml"; // Name of external node specific config -pub(crate) const EN_CONFIG_FILE: &str = "external_node.yaml"; +pub const EN_CONFIG_FILE: &str = "external_node.yaml"; pub(crate) const ERC20_CONFIGS_FILE: &str = "erc20.yaml"; /// Name of the initial deployments config file pub(crate) const INITIAL_DEPLOYMENT_FILE: &str = "initial_deployments.yaml"; /// Name of the erc20 deployments config file pub(crate) const ERC20_DEPLOYMENT_FILE: &str = "erc20_deployments.yaml"; /// Name of the contracts file -pub(crate) const CONTRACTS_FILE: &str = "contracts.yaml"; +pub const CONTRACTS_FILE: &str = "contracts.yaml"; /// Main repository for the ZKsync project pub const ZKSYNC_ERA_GIT_REPO: &str = "https://github.com/matter-labs/zksync-era"; /// Name of the docker-compose file inside zksync repository diff --git a/zk_toolbox/crates/config/src/lib.rs b/zk_toolbox/crates/config/src/lib.rs index 47d4040eb6bf..e2d366aeb869 100644 --- a/zk_toolbox/crates/config/src/lib.rs +++ b/zk_toolbox/crates/config/src/lib.rs @@ -1,5 +1,5 @@ pub use chain::*; -pub use consts::{DOCKER_COMPOSE_FILE, ZKSYNC_ERA_GIT_REPO}; +pub use consts::*; pub use contracts::*; pub use ecosystem::*; pub use file_config::*; diff --git a/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs index 7b21015691b9..2d58d2ef3bb9 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs @@ -1,3 +1,5 @@ pub use run_server::*; +pub use update::*; mod run_server; +mod update; diff --git a/zk_toolbox/crates/zk_inception/src/commands/args/update.rs b/zk_toolbox/crates/zk_inception/src/commands/args/update.rs new file mode 100644 index 000000000000..b6c980163fe6 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/args/update.rs @@ -0,0 +1,9 @@ +use clap::Parser; + +use 
crate::messages::MSG_UPDATE_ONLY_CONFIG_HELP; + +#[derive(Debug, Parser)] +pub struct UpdateArgs { + #[clap(long, short = 'c', help = MSG_UPDATE_ONLY_CONFIG_HELP)] + pub only_config: bool, +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/mod.rs index 5cba51265981..5eea6e8a5a1a 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/mod.rs @@ -6,3 +6,4 @@ pub mod ecosystem; pub mod external_node; pub mod prover; pub mod server; +pub mod update; diff --git a/zk_toolbox/crates/zk_inception/src/commands/update.rs b/zk_toolbox/crates/zk_inception/src/commands/update.rs new file mode 100644 index 000000000000..cc4fe13312cd --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/update.rs @@ -0,0 +1,495 @@ +use std::path::Path; + +use anyhow::{Context, Ok}; +use common::{ + git::{pull, submodule_update}, + logger, + spinner::Spinner, +}; +use config::{ + ChainConfig, EcosystemConfig, CONTRACTS_FILE, EN_CONFIG_FILE, GENERAL_FILE, GENESIS_FILE, + SECRETS_FILE, +}; +use xshell::Shell; + +use super::args::UpdateArgs; +use crate::messages::{ + msg_diff_contracts_config, msg_diff_genesis_config, msg_diff_secrets, msg_updating_chain, + MSG_CHAIN_NOT_FOUND_ERR, MSG_DIFF_EN_CONFIG, MSG_DIFF_EN_GENERAL_CONFIG, + MSG_DIFF_GENERAL_CONFIG, MSG_INVALID_KEY_TYPE_ERR, MSG_PULLING_ZKSYNC_CODE_SPINNER, + MSG_UPDATING_SUBMODULES_SPINNER, MSG_UPDATING_ZKSYNC, MSG_ZKSYNC_UPDATED, +}; + +/// Holds the differences between two YAML configurations. +#[derive(Default)] +struct ConfigDiff { + /// Fields that have different values between the two configurations + /// This contains the new values + pub differing_values: serde_yaml::Mapping, + + /// Fields that are present in the new configuration but not in the old one. 
+ pub new_fields: serde_yaml::Mapping, +} + +impl ConfigDiff { + fn print(&self, msg: &str, is_warning: bool) { + if self.new_fields.is_empty() { + return; + } + + if is_warning { + logger::warn(msg); + logger::warn(logger::object_to_string(&self.new_fields)); + } else { + logger::info(msg); + logger::info(logger::object_to_string(&self.new_fields)); + } + } +} + +pub fn run(shell: &Shell, args: UpdateArgs) -> anyhow::Result<()> { + logger::info(MSG_UPDATING_ZKSYNC); + let ecosystem = EcosystemConfig::from_file(shell)?; + + if !args.only_config { + update_repo(shell, &ecosystem)?; + } + + let general_config_path = ecosystem.get_default_configs_path().join(GENERAL_FILE); + let external_node_config_path = ecosystem.get_default_configs_path().join(EN_CONFIG_FILE); + let genesis_config_path = ecosystem.get_default_configs_path().join(GENESIS_FILE); + let contracts_config_path = ecosystem.get_default_configs_path().join(CONTRACTS_FILE); + let secrets_path = ecosystem.get_default_configs_path().join(SECRETS_FILE); + + for chain in ecosystem.list_of_chains() { + logger::step(msg_updating_chain(&chain)); + let chain = ecosystem + .load_chain(Some(chain)) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + update_chain( + shell, + &chain, + &general_config_path, + &external_node_config_path, + &genesis_config_path, + &contracts_config_path, + &secrets_path, + )?; + } + + logger::outro(MSG_ZKSYNC_UPDATED); + + Ok(()) +} + +fn update_repo(shell: &Shell, ecosystem: &EcosystemConfig) -> anyhow::Result<()> { + let link_to_code = ecosystem.link_to_code.clone(); + + let spinner = Spinner::new(MSG_PULLING_ZKSYNC_CODE_SPINNER); + pull(shell, link_to_code.clone())?; + spinner.finish(); + let spinner = Spinner::new(MSG_UPDATING_SUBMODULES_SPINNER); + submodule_update(shell, link_to_code.clone())?; + spinner.finish(); + + Ok(()) +} + +fn save_updated_config( + shell: &Shell, + config: serde_yaml::Value, + path: &Path, + diff: ConfigDiff, + msg: &str, +) -> anyhow::Result<()> { + if 
diff.new_fields.is_empty() { + return Ok(()); + } + + diff.print(msg, false); + + let general_config = serde_yaml::to_string(&config)?; + shell.write_file(path, general_config)?; + + Ok(()) +} + +fn update_config( + shell: Shell, + original_config_path: &Path, + chain_config_path: &Path, + save_config: bool, + msg: &str, +) -> anyhow::Result<()> { + let original_config = serde_yaml::from_str(&shell.read_file(original_config_path)?)?; + let mut chain_config = serde_yaml::from_str(&shell.read_file(chain_config_path)?)?; + let diff = merge_yaml(&mut chain_config, original_config)?; + if save_config { + save_updated_config(&shell, chain_config, chain_config_path, diff, msg)?; + } else { + diff.print(msg, true); + } + + Ok(()) +} + +fn update_chain( + shell: &Shell, + chain: &ChainConfig, + general: &Path, + external_node: &Path, + genesis: &Path, + contracts: &Path, + secrets: &Path, +) -> anyhow::Result<()> { + update_config( + shell.clone(), + general, + &chain.path_to_general_config(), + true, + MSG_DIFF_GENERAL_CONFIG, + )?; + + update_config( + shell.clone(), + external_node, + &chain.path_to_external_node_config(), + true, + MSG_DIFF_EN_CONFIG, + )?; + + update_config( + shell.clone(), + genesis, + &chain.path_to_genesis_config(), + false, + &msg_diff_genesis_config(&chain.name), + )?; + + update_config( + shell.clone(), + contracts, + &chain.path_to_contracts_config(), + false, + &msg_diff_contracts_config(&chain.name), + )?; + + update_config( + shell.clone(), + secrets, + &chain.path_to_secrets_config(), + false, + &msg_diff_secrets(&chain.name, &chain.path_to_secrets_config(), secrets), + )?; + + if let Some(external_node_config_path) = chain.external_node_config_path.clone() { + let external_node_general_config_path = external_node_config_path.join(GENERAL_FILE); + if !shell.path_exists(external_node_general_config_path.clone()) { + return Ok(()); + } + update_config( + shell.clone(), + general, + &external_node_general_config_path, + true, + 
MSG_DIFF_EN_GENERAL_CONFIG, + )?; + } + + Ok(()) +} + +fn merge_yaml_internal( + a: &mut serde_yaml::Value, + b: serde_yaml::Value, + current_key: String, + diff: &mut ConfigDiff, +) -> anyhow::Result<()> { + match (a, b) { + (serde_yaml::Value::Mapping(a), serde_yaml::Value::Mapping(b)) => { + for (key, value) in b { + let k = key.as_str().context(MSG_INVALID_KEY_TYPE_ERR)?.to_string(); + let current_key = if current_key.is_empty() { + k.clone() + } else { + format!("{}.{}", current_key, k) + }; + + if a.contains_key(&key) { + merge_yaml_internal(a.get_mut(&key).unwrap(), value, current_key, diff)?; + } else { + a.insert(key.clone(), value.clone()); + diff.new_fields.insert(current_key.into(), value); + } + } + } + (a, b) => { + if a != &b { + diff.differing_values.insert(current_key.into(), b); + } + } + } + Ok(()) +} + +fn merge_yaml(a: &mut serde_yaml::Value, b: serde_yaml::Value) -> anyhow::Result { + let mut diff = ConfigDiff::default(); + merge_yaml_internal(a, b, "".into(), &mut diff)?; + Ok(diff) +} + +#[cfg(test)] +mod tests { + #[test] + fn test_merge_yaml_both_are_equal_returns_no_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let diff = super::merge_yaml(&mut a, b).unwrap(); + assert!(diff.differing_values.is_empty()); + assert!(diff.new_fields.is_empty()); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_b_has_extra_field_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + 
.unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); + assert!(diff.differing_values.is_empty()); + assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key5".into()).unwrap(), + b.clone().get("key5").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_extra_field_no_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b).unwrap(); + assert!(diff.differing_values.is_empty()); + assert!(diff.new_fields.is_empty()); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_extra_field_and_b_has_extra_field_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key6: value6 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + key6: value6 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); + assert_eq!(diff.differing_values.len(), 0); + assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key6".into()).unwrap(), + b.clone().get("key6").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_different_value_returns_diff() { + 
let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3.key4".into()) + .unwrap(), + b.get("key3").unwrap().get("key4").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_different_value_and_b_has_extra_field_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + key5: value5 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3.key4".into()) + .unwrap(), + b.get("key3").unwrap().get("key4").unwrap() + ); + assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key5".into()).unwrap(), + b.get("key5").unwrap() + ); + assert_eq!(a, expected); + } +} diff --git a/zk_toolbox/crates/zk_inception/src/main.rs b/zk_toolbox/crates/zk_inception/src/main.rs index dd10e9494627..e68dec4d3ed7 100644 --- a/zk_toolbox/crates/zk_inception/src/main.rs +++ b/zk_toolbox/crates/zk_inception/src/main.rs @@ -1,5 +1,5 @@ use clap::{command, Parser, Subcommand}; -use commands::contract_verifier::ContractVerifierCommands; +use commands::{args::UpdateArgs, 
contract_verifier::ContractVerifierCommands}; use common::{ check_general_prerequisites, config::{global_config, init_global_config, GlobalConfig}, @@ -53,6 +53,9 @@ pub enum InceptionSubcommands { /// Run contract verifier #[command(subcommand)] ContractVerifier(ContractVerifierCommands), + /// Update zkSync + #[command(alias = "u")] + Update(UpdateArgs), } #[derive(Parser, Debug)] @@ -110,6 +113,7 @@ async fn run_subcommand(inception_args: Inception, shell: &Shell) -> anyhow::Res InceptionSubcommands::ContractVerifier(args) => { commands::contract_verifier::run(shell, args).await? } + InceptionSubcommands::Update(args) => commands::update::run(shell, args)?, } Ok(()) } diff --git a/zk_toolbox/crates/zk_inception/src/messages.rs b/zk_toolbox/crates/zk_inception/src/messages.rs index 555aade78cbb..428b06516921 100644 --- a/zk_toolbox/crates/zk_inception/src/messages.rs +++ b/zk_toolbox/crates/zk_inception/src/messages.rs @@ -319,3 +319,44 @@ pub(super) fn msg_binary_already_exists(name: &str, version: &str) -> String { pub(super) fn msg_downloading_binary_spinner(name: &str, version: &str) -> String { format!("Downloading {} {} binary", name, version) } + +/// Update related messages + +pub(super) const MSG_UPDATE_ONLY_CONFIG_HELP: &str = "Update only the config files"; +pub(super) const MSG_UPDATING_ZKSYNC: &str = "Updating ZkSync"; +pub(super) const MSG_ZKSYNC_UPDATED: &str = "ZkSync updated successfully"; +pub(super) const MSG_PULLING_ZKSYNC_CODE_SPINNER: &str = "Pulling zksync-era repo..."; +pub(super) const MSG_UPDATING_SUBMODULES_SPINNER: &str = "Updating submodules..."; +pub(super) const MSG_DIFF_GENERAL_CONFIG: &str = + "Added the following fields to the general config:"; +pub(super) const MSG_DIFF_EN_CONFIG: &str = + "Added the following fields to the external node config:"; +pub(super) const MSG_DIFF_EN_GENERAL_CONFIG: &str = + "Added the following fields to the external node generalconfig:"; +pub(super) const MSG_INVALID_KEY_TYPE_ERR: &str = "Invalid key 
type"; + +pub(super) fn msg_diff_genesis_config(chain: &str) -> String { + format!( + "Found differences between chain {chain} and era genesis configs. Consider updating the chain {chain} genesis config and re-running genesis. Diff:" + ) +} + +pub(super) fn msg_diff_contracts_config(chain: &str) -> String { + format!( + "Found differences between chain {chain} and era contracts configs. Consider updating the chain {chain} contracts config and re-running genesis. Diff:" + ) +} + +pub(super) fn msg_diff_secrets( + chain: &str, + current_secrets_path: &Path, + era_secret_path: &Path, +) -> String { + format!( + "Found differences between chain {chain} and era secrets configs. Consider updating the chain {chain} secrets config at {current_secrets_path:?} using the file {era_secret_path:?} as reference. Diff:" + ) +} + +pub(super) fn msg_updating_chain(chain: &str) -> String { + format!("Updating chain: {}", chain) +} From 3729468436114642e62ce8a531533921015455a7 Mon Sep 17 00:00:00 2001 From: Danil Date: Fri, 26 Jul 2024 17:48:11 +0200 Subject: [PATCH 52/52] fix(contract verifier): Fix config values (#2510) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ ## Why ❔ ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--------- Signed-off-by: Danil --- core/lib/contract_verifier/src/lib.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/core/lib/contract_verifier/src/lib.rs b/core/lib/contract_verifier/src/lib.rs index 224d4b292347..30901729fc54 100644 --- a/core/lib/contract_verifier/src/lib.rs +++ b/core/lib/contract_verifier/src/lib.rs @@ -12,7 +12,6 @@ use regex::Regex; use tokio::time; use zksync_config::ContractVerifierConfig; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; -use zksync_env_config::FromEnv; use zksync_queued_job_processor::{async_trait, JobProcessor}; use zksync_types::{ contract_verification_api::{ @@ -524,11 +523,10 @@ impl JobProcessor for ContractVerifier { started_at: Instant, ) -> tokio::task::JoinHandle> { let connection_pool = self.connection_pool.clone(); + let config = self.config.clone(); tokio::task::spawn(async move { tracing::info!("Started to process request with id = {}", job.id); - let config: ContractVerifierConfig = - ContractVerifierConfig::from_env().context("ContractVerifierConfig")?; let mut connection = connection_pool.connection().await.unwrap(); let job_id = job.id;