From 17e564c6f7914cc6c4304626592fd81f611ac83c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 13:42:13 -0300 Subject: [PATCH 01/65] fmt --- prover/prover_cli/Cargo.toml | 17 ++++++++++++++++ prover/prover_cli/src/cli.rs | 26 ++++++++++++++++++++++++ prover/prover_cli/src/commands/mod.rs | 1 + prover/prover_cli/src/commands/status.rs | 4 ++++ prover/prover_cli/src/lib.rs | 2 ++ prover/prover_cli/src/main.rs | 10 +++++++++ 6 files changed, 60 insertions(+) create mode 100644 prover/prover_cli/Cargo.toml create mode 100644 prover/prover_cli/src/cli.rs create mode 100644 prover/prover_cli/src/commands/mod.rs create mode 100644 prover/prover_cli/src/commands/status.rs create mode 100644 prover/prover_cli/src/lib.rs create mode 100644 prover/prover_cli/src/main.rs diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml new file mode 100644 index 000000000000..7effe24198a1 --- /dev/null +++ b/prover/prover_cli/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "prover_cli" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +tokio = { version = "1", features = ["rt-multi-thread", "macros"] } +env_logger = "0.10" +log = "0.4" +clap = { version = "4.3", features = ["derive"] } +eyre = "0.6" diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs new file mode 100644 index 000000000000..6d09f3b0171e --- /dev/null +++ b/prover/prover_cli/src/cli.rs @@ -0,0 +1,26 @@ +use crate::commands::status; + +use clap::{command, Parser, Subcommand}; + +pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); + +#[derive(Parser)] +#[command(name="prover-cli", version=VERSION_STRING, about, long_about = None)] +struct ProverCLI { + #[command(subcommand)] + command: ProverCommand, +} + +#[derive(Subcommand)] +enum ProverCommand { + Status, +} + +pub async fn start() -> eyre::Result<()> { + let ProverCLI { command } = ProverCLI::parse(); + match command { + ProverCommand::Status => status::run().await?, + }; + + Ok(()) +} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs new file mode 100644 index 000000000000..3d7dcd739867 --- /dev/null +++ b/prover/prover_cli/src/commands/mod.rs @@ -0,0 +1 @@ +pub(crate) mod status; diff --git a/prover/prover_cli/src/commands/status.rs b/prover/prover_cli/src/commands/status.rs new file mode 100644 index 000000000000..5edf73beb816 --- /dev/null +++ b/prover/prover_cli/src/commands/status.rs @@ -0,0 +1,4 @@ +pub(crate) async fn run() -> eyre::Result<()> { + log::info!("Prover Status"); + Ok(()) +} diff --git a/prover/prover_cli/src/lib.rs b/prover/prover_cli/src/lib.rs new file mode 100644 index 000000000000..3ef8b313f0c2 --- /dev/null +++ b/prover/prover_cli/src/lib.rs @@ -0,0 +1,2 @@ +pub mod cli; +mod commands; diff --git a/prover/prover_cli/src/main.rs b/prover/prover_cli/src/main.rs new file mode 100644 index 000000000000..f2a7dd710267 --- /dev/null +++ b/prover/prover_cli/src/main.rs @@ -0,0 +1,10 @@ +use prover_cli::cli; + +#[tokio::main] +async fn main() { + env_logger::builder() + .filter_level(log::LevelFilter::Debug) + .init(); + + cli::start().await.unwrap(); +} From 245b0a4e75ddd1c6a968b07152cfcfffe70cc2c0 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 16:09:36 -0300 Subject: [PATCH 02/65] wip --- prover/prover_cli/Cargo.toml | 20 +++++++++++++++ 
prover/prover_cli/src/cli.rs | 6 +++-- .../src/commands/get_proof_progress.rs | 25 +++++++++++++++++++ prover/prover_cli/src/commands/mod.rs | 1 + 4 files changed, 50 insertions(+), 2 deletions(-) create mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index 7effe24198a1..ae0dddd885dc 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -15,3 +15,23 @@ env_logger = "0.10" log = "0.4" clap = { version = "4.3", features = ["derive"] } eyre = "0.6" + +anyhow.workspace = true +zksync_config.workspace = true +zksync_env_config.workspace = true +zksync_db_connection.workspace = true +zksync_basic_types.workspace = true +zksync_types.workspace = true +prover_dal.workspace = true +sqlx = { workspace = true, features = [ + "runtime-tokio", + "tls-native-tls", + "macros", + "postgres", + "bigdecimal", + "rust_decimal", + "chrono", + "json", + "migrate", + "ipnetwork", +] } diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index 6d09f3b0171e..ed1c083a4caa 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,7 +1,7 @@ -use crate::commands::status; - use clap::{command, Parser, Subcommand}; +use crate::commands::{get_proof_progress, status}; + pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); #[derive(Parser)] @@ -14,12 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { Status, + GetProofProgress, } pub async fn start() -> eyre::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::Status => status::run().await?, + ProverCommand::GetProofProgress => get_proof_progress::run().await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs new file mode 100644 index 000000000000..2f5f6ecf8141 --- /dev/null +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -0,0 +1,25 @@ +use anyhow::Context as _; +use prover_dal::{ConnectionPool, Prover, ProverDal}; +use tokio::{ + sync::{oneshot, watch::Receiver}, + task::JoinHandle, +}; +use zksync_config::configs::{ + fri_prover_group::FriProverGroupConfig, FriProverConfig, ObservabilityConfig, PostgresConfig, +}; +use zksync_env_config::{ + object_store::{ProverObjectStoreConfig, PublicObjectStoreConfig}, + FromEnv, +}; + +pub(crate) async fn run() -> eyre::Result<()> { + log::info!("Proof Progress"); + + let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; + let pool = ConnectionPool::singleton(postgres_config.prover_url()?) 
+ .build() + .await + .context("failed to build a connection pool")?; + + Ok(()) +} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 3d7dcd739867..b36d7c3d75ee 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1 +1,2 @@ +pub(crate) mod get_proof_progress; pub(crate) mod status; From 8e84243ea40be2e1b4c523413fff32d9575e86d3 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 16:12:37 -0300 Subject: [PATCH 03/65] add cargo.toml cargo.lock --- prover/Cargo.lock | 50 +++++++++++++++++++++++++++++++++++++++++++++++ prover/Cargo.toml | 1 + 2 files changed, 51 insertions(+) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index a0d1b1dcb840..b9e7119a6c2e 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -1755,7 +1755,11 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" dependencies = [ + "humantime", + "is-terminal", "log", + "regex", + "termcolor", ] [[package]] @@ -1866,6 +1870,16 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + [[package]] name = "fastrand" version = "2.0.1" @@ -2712,6 +2726,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + [[package]] name = "indexmap" version = "1.9.3" @@ -2747,6 +2767,17 @@ dependencies = [ "serde", ] +[[package]] +name = "is-terminal" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi 0.3.6", + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "itertools" version = "0.10.5" @@ -4314,6 +4345,25 @@ dependencies = [ "thiserror", ] +[[package]] +name = "prover_cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.6", + "env_logger 0.10.2", + "eyre", + "log", + "prover_dal", + "sqlx", + "tokio", + "zksync_basic_types", + "zksync_config", + "zksync_db_connection", + "zksync_env_config", + "zksync_types", +] + [[package]] name = "prover_dal" version = "0.1.0" diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 2e9ee65ce7a7..7f33fa4a18c3 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -13,6 +13,7 @@ members = [ "prover_fri_gateway", "proof_fri_compressor", "tools", + "prover_cli", ] resolver = "2" From 17517b144741b5e7bb05dc105c8cedc55b3d9a32 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 17:36:57 -0300 Subject: [PATCH 04/65] go back to anihow --- prover/prover_cli/src/cli.rs | 2 +- .../src/commands/get_proof_progress.rs | 36 +++++++++++-------- prover/prover_cli/src/commands/status.rs | 2 +- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index ed1c083a4caa..0b0d87f1c9d3 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -17,7 +17,7 @@ enum ProverCommand { GetProofProgress, } -pub async fn 
start() -> eyre::Result<()> { +pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::Status => status::run().await?, diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index 2f5f6ecf8141..c4ff753afec3 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -1,25 +1,33 @@ use anyhow::Context as _; -use prover_dal::{ConnectionPool, Prover, ProverDal}; -use tokio::{ - sync::{oneshot, watch::Receiver}, - task::JoinHandle, -}; -use zksync_config::configs::{ - fri_prover_group::FriProverGroupConfig, FriProverConfig, ObservabilityConfig, PostgresConfig, -}; -use zksync_env_config::{ - object_store::{ProverObjectStoreConfig, PublicObjectStoreConfig}, - FromEnv, -}; +use zksync_config::PostgresConfig; +use zksync_db_connection::connection_pool::ConnectionPool; +use zksync_env_config::FromEnv; +use prover_dal::Prover; -pub(crate) async fn run() -> eyre::Result<()> { +pub(crate) async fn run() -> anyhow::Result<()> { log::info!("Proof Progress"); let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; - let pool = ConnectionPool::singleton(postgres_config.prover_url()?) + + println!("{:?}", postgres_config); + + let pool = ConnectionPool::::singleton(postgres_config.prover_url()?) .build() .await .context("failed to build a connection pool")?; + // let asd = sqlx::query_as!( + // StorageL1BatchHeader, + // r#" + // SELECT + // id + // FROM + // prover_jobs_fri + // "#, + // ) + // .fetch_all(pool) + // .await?; + Ok(()) } + diff --git a/prover/prover_cli/src/commands/status.rs b/prover/prover_cli/src/commands/status.rs index 5edf73beb816..5d0b9b085260 100644 --- a/prover/prover_cli/src/commands/status.rs +++ b/prover/prover_cli/src/commands/status.rs @@ -1,4 +1,4 @@ -pub(crate) async fn run() -> eyre::Result<()> { +pub(crate) async fn run() -> anyhow::Result<()> { log::info!("Prover Status"); Ok(()) } From 4ab7ce87d6757d72e5eaed78da62946f0b3b7083 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 17:37:13 -0300 Subject: [PATCH 05/65] fmt --- prover/prover_cli/src/commands/get_proof_progress.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index c4ff753afec3..9328c355dded 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -1,8 +1,8 @@ use anyhow::Context as _; +use prover_dal::Prover; use zksync_config::PostgresConfig; use zksync_db_connection::connection_pool::ConnectionPool; use zksync_env_config::FromEnv; -use prover_dal::Prover; pub(crate) async fn run() -> anyhow::Result<()> { log::info!("Proof Progress"); @@ -30,4 +30,3 @@ pub(crate) async fn run() -> anyhow::Result<()> { Ok(()) } - From 46552017c1a736ec85ac812b1ff1bac75b4346d1 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 9 Apr 2024 13:50:12 -0300 Subject: [PATCH 06/65] rm tool workspace --- prover/Cargo.lock | 7 +++ prover/Cargo.toml | 1 - prover/prover_cli/Cargo.toml | 9 +++- prover/{tools => prover_cli}/README.md | 4 +- prover/prover_cli/src/cli.rs | 8 ++-- .../src/commands/get_file_info.rs} | 47 ++++--------------- .../src/commands/get_proof_progress.rs | 32 ------------- prover/prover_cli/src/commands/mod.rs | 3 +- prover/prover_cli/src/commands/status.rs | 4 -- 
prover/tools/Cargo.toml | 20 -------- 10 files changed, 31 insertions(+), 104 deletions(-) rename prover/{tools => prover_cli}/README.md (93%) rename prover/{tools/src/main.rs => prover_cli/src/commands/get_file_info.rs} (90%) delete mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs delete mode 100644 prover/prover_cli/src/commands/status.rs delete mode 100644 prover/tools/Cargo.toml diff --git a/prover/Cargo.lock b/prover/Cargo.lock index b9e7119a6c2e..40a9ad15394f 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -4350,17 +4350,24 @@ name = "prover_cli" version = "0.1.0" dependencies = [ "anyhow", + "bincode", "clap 4.4.6", + "colored", "env_logger 0.10.2", "eyre", + "hex", "log", "prover_dal", "sqlx", "tokio", + "tracing", + "tracing-subscriber", "zksync_basic_types", "zksync_config", "zksync_db_connection", "zksync_env_config", + "zksync_prover_fri_types", + "zksync_prover_interface", "zksync_types", ] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 7f33fa4a18c3..bd2b25bd3420 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -12,7 +12,6 @@ members = [ "witness_vector_generator", "prover_fri_gateway", "proof_fri_compressor", - "tools", "prover_cli", ] diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index ae0dddd885dc..3952b12bd36c 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -13,15 +13,22 @@ categories.workspace = true tokio = { version = "1", features = ["rt-multi-thread", "macros"] } env_logger = "0.10" log = "0.4" -clap = { version = "4.3", features = ["derive"] } eyre = "0.6" +clap = { workspace = true, features = ["derive"] } +tracing.workspace = true +tracing-subscriber = { workspace = true, features = ["env-filter"] } +bincode.workspace = true +colored.workspace = true +hex.workspace = true anyhow.workspace = true zksync_config.workspace = true zksync_env_config.workspace = true zksync_db_connection.workspace = true zksync_basic_types.workspace = true zksync_types.workspace = true +zksync_prover_fri_types.workspace = true +zksync_prover_interface.workspace = true prover_dal.workspace = true sqlx = { workspace = true, features = [ "runtime-tokio", diff --git a/prover/tools/README.md b/prover/prover_cli/README.md similarity index 93% rename from prover/tools/README.md rename to prover/prover_cli/README.md index 35778faa687a..25bbe95fc4d5 100644 --- a/prover/tools/README.md +++ b/prover/prover_cli/README.md @@ -1,9 +1,9 @@ -# Tool to better understand and debug provers +# CLI to better understand and debug provers For now, it has only one command 'file-info' ``` -cargo run --release file-info /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin +cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin ``` Example outputs: diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index 0b0d87f1c9d3..844387f983e0 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_proof_progress, status}; +use crate::commands::get_file_info; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -13,15 +13,13 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { - Status, - GetProofProgress, + FileInfo(get_file_info::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { - ProverCommand::Status => status::run().await?, - ProverCommand::GetProofProgress => 
get_proof_progress::run().await?, + ProverCommand::FileInfo(args) => get_file_info::run(args).await?, }; Ok(()) diff --git a/prover/tools/src/main.rs b/prover/prover_cli/src/commands/get_file_info.rs similarity index 90% rename from prover/tools/src/main.rs rename to prover/prover_cli/src/commands/get_file_info.rs index f7df2147fac9..ff32f56a22c6 100644 --- a/prover/tools/src/main.rs +++ b/prover/prover_cli/src/commands/get_file_info.rs @@ -1,8 +1,7 @@ use std::fs; -use clap::{Parser, Subcommand}; +use clap::Args as ClapArgs; use colored::Colorize; -use tracing::level_filters::LevelFilter; use zksync_prover_fri_types::{ circuit_definitions::{ boojum::{ @@ -18,23 +17,10 @@ use zksync_prover_fri_types::{ }; use zksync_prover_interface::outputs::L1BatchProofForL1; -#[derive(Debug, Parser)] -#[command( - author = "Matter Labs", - version, - about = "Debugging tools for prover related things", - long_about = None -)] - -struct Cli { - #[command(subcommand)] - command: Command, -} - -#[derive(Debug, Subcommand)] -enum Command { - #[command(name = "file-info")] - FileInfo { file_path: String }, +#[derive(ClapArgs)] +pub(crate) struct Args { + #[clap(short, long)] + file_path: String, } fn pretty_print_size_hint(size_hint: (Option, Option)) { @@ -204,7 +190,8 @@ fn pretty_print_l1_proof(result: &L1BatchProofForL1) { println!(" This proof will pass on L1, if L1 executor computes the block commitment that is matching exactly the Inputs value above"); } -fn file_info(path: String) { +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + let path = args.file_path; println!("Reading file {} and guessing the type.", path.bold()); let bytes = fs::read(path).unwrap(); @@ -214,14 +201,14 @@ fn file_info(path: String) { if let Some(circuit) = maybe_circuit { println!(" Parsing file as CircuitWrapper."); pretty_print_circuit_wrapper(&circuit); - return; + return Ok(()); } println!(" NOT a CircuitWrapper."); let maybe_fri_proof: Option = bincode::deserialize(&bytes).ok(); if let Some(fri_proof) = maybe_fri_proof { println!(" Parsing file as FriProofWrapper."); pretty_print_proof(&fri_proof); - return; + return Ok(()); } println!(" NOT a FriProofWrapper."); @@ -232,19 +219,5 @@ fn file_info(path: String) { } else { println!(" NOT a L1BatchProof."); } -} - -fn main() { - tracing_subscriber::fmt() - .with_env_filter( - tracing_subscriber::EnvFilter::builder() - .with_default_directive(LevelFilter::INFO.into()) - .from_env_lossy(), - ) - .init(); - - let opt = Cli::parse(); - match opt.command { - Command::FileInfo { file_path } => file_info(file_path), - } + Ok(()) } diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs deleted file mode 100644 index 9328c355dded..000000000000 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ /dev/null @@ -1,32 +0,0 @@ -use anyhow::Context as _; -use prover_dal::Prover; -use zksync_config::PostgresConfig; -use zksync_db_connection::connection_pool::ConnectionPool; -use zksync_env_config::FromEnv; - -pub(crate) async fn run() -> anyhow::Result<()> { - log::info!("Proof Progress"); - - let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; - - println!("{:?}", postgres_config); - - let pool = ConnectionPool::::singleton(postgres_config.prover_url()?) 
- .build() - .await - .context("failed to build a connection pool")?; - - // let asd = sqlx::query_as!( - // StorageL1BatchHeader, - // r#" - // SELECT - // id - // FROM - // prover_jobs_fri - // "#, - // ) - // .fetch_all(pool) - // .await?; - - Ok(()) -} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index b36d7c3d75ee..3e9a45cb72ac 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1 @@ -pub(crate) mod get_proof_progress; -pub(crate) mod status; +pub(crate) mod get_file_info; diff --git a/prover/prover_cli/src/commands/status.rs b/prover/prover_cli/src/commands/status.rs deleted file mode 100644 index 5d0b9b085260..000000000000 --- a/prover/prover_cli/src/commands/status.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub(crate) async fn run() -> anyhow::Result<()> { - log::info!("Prover Status"); - Ok(()) -} diff --git a/prover/tools/Cargo.toml b/prover/tools/Cargo.toml deleted file mode 100644 index 66df1e99db4e..000000000000 --- a/prover/tools/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "tools" -version.workspace = true -edition.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true -license.workspace = true -keywords.workspace = true -categories.workspace = true - -[dependencies] -clap = { workspace = true, features = ["derive"] } -tracing.workspace = true -tracing-subscriber = { workspace = true, features = ["env-filter"] } -zksync_prover_fri_types.workspace = true -bincode.workspace = true -colored.workspace = true -zksync_prover_interface.workspace = true -hex.workspace = true \ No newline at end of file From c32d1aa427711b3123735a6b12bbd891c9889270 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 10 Apr 2024 16:12:21 -0300 Subject: [PATCH 07/65] fmt --- prover/Cargo.lock | 14 ----- prover/prover_cli/src/cli.rs | 4 +- .../src/commands/get_proof_progress.rs | 57 +++++++++++++++++++ prover/prover_cli/src/commands/mod.rs | 1 + 4 files changed, 61 insertions(+), 15 deletions(-) create mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 40a9ad15394f..16c9ad3c20a5 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -6302,20 +6302,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tools" -version = "0.1.0" -dependencies = [ - "bincode", - "clap 4.4.6", - "colored", - "hex", - "tracing", - "tracing-subscriber", - "zksync_prover_fri_types", - "zksync_prover_interface", -] - [[package]] name = "tower" version = "0.4.13" diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index 844387f983e0..dbf1b910b044 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::get_file_info; +use crate::commands::{get_file_info, get_proof_progress}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,12 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), + ProofProgress(get_proof_progress::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, + ProverCommand::ProofProgress(args) => get_proof_progress::run(args).await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs new file mode 
100644 index 000000000000..36b1033a5b5b --- /dev/null +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -0,0 +1,57 @@ +use anyhow::Context as _; +use clap::Args as ClapArgs; +use sqlx::postgres::PgPoolOptions; +use sqlx::Row; +use zksync_config::PostgresConfig; +use zksync_env_config::FromEnv; + +#[derive(ClapArgs)] +pub(crate) struct Args { + #[clap(short, long)] + l1_batch_number: i32, +} + +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + let pool = PgPoolOptions::new() + .max_connections(5) + .connect("postgres://postgres:notsecurepassword@localhost/prover_local") + .await?; + + let query = sqlx::query( + "SELECT + id, + status, + error, + processing_started_at, + time_taken, + is_node_final_proof + FROM + prover_jobs_fri + WHERE + l1_batch_number = $1", + ) + .bind(&args.l1_batch_number) + .fetch_all(&pool) + .await?; + + let total_jobs = query.len(); + let successful_jobs = query + .iter() + .filter(|row| row.get::("status") == "successfull") + .count(); + let failed_jobs = query + .iter() + .filter(|row| row.get::("status") == "failed") + .count(); + let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; + + println!("= Prover progress ="); + println!("Batch number: {}", args.l1_batch_number); + println!( + "Progress: {:.2}% ({}/{})", + progress, successful_jobs, total_jobs + ); + println!("Failed: {}", failed_jobs); + + Ok(()) +} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 3e9a45cb72ac..87d92ecb0557 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1 +1,2 @@ pub(crate) mod get_file_info; +pub(crate) mod get_proof_progress; From 8e49e04f1c5d4a184b43770432bbd987cca9dd8e Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 10 Apr 2024 16:13:03 -0300 Subject: [PATCH 08/65] zk fmt --- prover/prover_cli/src/commands/get_proof_progress.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index 36b1033a5b5b..e497b7afca1b 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -1,7 +1,6 @@ use anyhow::Context as _; use clap::Args as ClapArgs; -use sqlx::postgres::PgPoolOptions; -use sqlx::Row; +use sqlx::{postgres::PgPoolOptions, Row}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; From 80e13daf9163b77b268bbbab66be8e047addae88 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 10 Apr 2024 16:18:57 -0300 Subject: [PATCH 09/65] fix typo --- prover/prover_cli/src/commands/get_proof_progress.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index e497b7afca1b..f23fe7de9829 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -36,7 +36,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let total_jobs = query.len(); let successful_jobs = query .iter() - .filter(|row| row.get::("status") == "successfull") + .filter(|row| row.get::("status") == "successful") .count(); let failed_jobs = query .iter() From 0f2ebd8b639f7246c18b58a7615ceb8fd0faa53c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 12 Apr 2024 18:51:18 -0300 Subject: [PATCH 10/65] move query to prover_dal --- prover/prover_cli/src/cli.rs | 6 +- 
.../src/commands/get_proof_progress.rs | 56 -------------- prover/prover_cli/src/commands/mod.rs | 2 +- prover/prover_cli/src/commands/progress.rs | 74 +++++++++++++++++++ prover/prover_dal/src/fri_prover_dal.rs | 53 +++++++++++++ 5 files changed, 131 insertions(+), 60 deletions(-) delete mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs create mode 100644 prover/prover_cli/src/commands/progress.rs diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index dbf1b910b044..b72731a9f646 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_file_info, get_proof_progress}; +use crate::commands::{get_file_info, progress}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,14 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), - ProofProgress(get_proof_progress::Args), + Progress(progress::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::ProofProgress(args) => get_proof_progress::run(args).await?, + ProverCommand::Progress(args) => progress::run(args).await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs deleted file mode 100644 index f23fe7de9829..000000000000 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ /dev/null @@ -1,56 +0,0 @@ -use anyhow::Context as _; -use clap::Args as ClapArgs; -use sqlx::{postgres::PgPoolOptions, Row}; -use zksync_config::PostgresConfig; -use zksync_env_config::FromEnv; - -#[derive(ClapArgs)] -pub(crate) struct Args { - #[clap(short, long)] - l1_batch_number: i32, -} - -pub(crate) async fn run(args: Args) -> anyhow::Result<()> { - let pool = PgPoolOptions::new() - .max_connections(5) - .connect("postgres://postgres:notsecurepassword@localhost/prover_local") - .await?; - - let query = sqlx::query( - "SELECT - id, - status, - error, - processing_started_at, - time_taken, - is_node_final_proof - FROM - prover_jobs_fri - WHERE - l1_batch_number = $1", - ) - .bind(&args.l1_batch_number) - .fetch_all(&pool) - .await?; - - let total_jobs = query.len(); - let successful_jobs = query - .iter() - .filter(|row| row.get::("status") == "successful") - .count(); - let failed_jobs = query - .iter() - .filter(|row| row.get::("status") == "failed") - .count(); - let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; - - println!("= Prover progress ="); - println!("Batch number: {}", args.l1_batch_number); - println!( - "Progress: {:.2}% ({}/{})", - progress, successful_jobs, total_jobs - ); - println!("Failed: {}", failed_jobs); - - Ok(()) -} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 87d92ecb0557..027cee560ad1 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1,2 @@ pub(crate) mod get_file_info; -pub(crate) mod get_proof_progress; +pub(crate) mod progress; diff --git a/prover/prover_cli/src/commands/progress.rs b/prover/prover_cli/src/commands/progress.rs new file mode 100644 index 000000000000..cbc01ad260fa --- /dev/null +++ b/prover/prover_cli/src/commands/progress.rs @@ -0,0 +1,74 @@ +use anyhow::Context as _; +use clap::Args as ClapArgs; +use prover_dal::{ConnectionPool, Prover, ProverDal}; +use 
zksync_basic_types::L1BatchNumber; +use zksync_config::PostgresConfig; +use zksync_env_config::FromEnv; + +#[derive(ClapArgs)] +pub(crate) struct Args { + #[clap(short, long, conflicts_with = "all", required_unless_present = "all", num_args = 0..)] + proof: Option>, + #[clap(short, long, default_value("false"))] + verbose: bool, + #[clap(short, long, conflicts_with = "proof")] + all: bool, +} + +fn pretty_print_job_status( + l1_batch_number: &L1BatchNumber, + total_jobs: usize, + successful_jobs: usize, + failed_jobs: usize, +) { + let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; + println!("Batch number: {}", l1_batch_number); + println!( + "Progress: {:.2}% ({}/{})", + progress, successful_jobs, total_jobs + ); + println!("Failed: {}", failed_jobs); +} + +async fn get_one_batch_progress(l1_batches_numbers: Vec) -> anyhow::Result<()> { + let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; + + let prover_connection_pool = ConnectionPool::::builder( + postgres_config.prover_url()?, + postgres_config.max_connections()?, + ) + .build() + .await + .context("failed to build a prover_connection_pool")?; + + let mut conn = prover_connection_pool.connection().await.unwrap(); + let stats = conn + .fri_prover_jobs_dal() + .get_prover_jobs_stats_for_batch(l1_batches_numbers) + .await; + + for row in &stats { + let (l1_batch_number, statistics) = row; + let total_jobs = + statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; + pretty_print_job_status( + l1_batch_number, + total_jobs, + statistics.successful, + statistics.failed, + ) + } + Ok(()) +} + +async fn get_all_batches_progress() -> anyhow::Result<()> { + Ok(()) +} +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + println!("{:?}", args.proof); + if let Some(l1_batch_number) = args.proof { + get_one_batch_progress(l1_batch_number).await + } else { + get_all_batches_progress().await + } +} diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 2da9f69d311a..ab4f3e131753 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -594,4 +594,57 @@ impl FriProverDal<'_, '_> { .unwrap() .unwrap_or(0) as usize } + + pub async fn get_prover_jobs_stats_for_batch( + &mut self, + l1_batches_numbers: Vec, + ) -> HashMap { + { + sqlx::query!( + r#" + SELECT + COUNT(*) AS "count!", + l1_batch_number AS "l1_batch_number!", + status AS "status!" 
+ FROM + prover_jobs_fri + WHERE + l1_batch_number = ANY ($1) + GROUP BY + l1_batch_number, + status + "#, + &l1_batches_numbers + .into_iter() + .map(|x| i64::from(x.0)) + .collect::>() + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| (row.l1_batch_number, row.status, row.count as usize)) + .fold( + HashMap::new(), + |mut acc, (l1_batch_number, status, value)| { + let stats = acc.entry(L1BatchNumber(l1_batch_number as u32)).or_insert( + JobCountStatistics { + queued: 0, + in_progress: 0, + failed: 0, + successful: 0, + }, + ); + match status.as_ref() { + "queued" => stats.queued = value, + "in_progress" => stats.in_progress = value, + "failed" => stats.failed = value, + "successful" => stats.successful = value, + _ => (), + } + acc + }, + ) + } + } } From 69d2104ae2ab1f219ce23160057100f20a7c6813 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 15 Apr 2024 13:08:31 -0300 Subject: [PATCH 11/65] remove --all flag --- prover/prover_cli/src/commands/progress.rs | 59 +++++++++------------- 1 file changed, 24 insertions(+), 35 deletions(-) diff --git a/prover/prover_cli/src/commands/progress.rs b/prover/prover_cli/src/commands/progress.rs index cbc01ad260fa..54a5bf045983 100644 --- a/prover/prover_cli/src/commands/progress.rs +++ b/prover/prover_cli/src/commands/progress.rs @@ -1,36 +1,40 @@ use anyhow::Context as _; use clap::Args as ClapArgs; use prover_dal::{ConnectionPool, Prover, ProverDal}; -use zksync_basic_types::L1BatchNumber; +use zksync_basic_types::{prover_dal::JobCountStatistics, L1BatchNumber}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; #[derive(ClapArgs)] pub(crate) struct Args { - #[clap(short, long, conflicts_with = "all", required_unless_present = "all", num_args = 0..)] - proof: Option>, + #[clap(short, long, num_args = 0..)] + batch: Vec, #[clap(short, long, default_value("false"))] verbose: bool, - #[clap(short, long, conflicts_with = "proof")] - all: bool, } fn pretty_print_job_status( l1_batch_number: &L1BatchNumber, - total_jobs: usize, - successful_jobs: usize, - failed_jobs: usize, + statistics: &JobCountStatistics, + verbose: bool, ) { - let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; + let total_jobs = + statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; + let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; println!("Batch number: {}", l1_batch_number); println!( "Progress: {:.2}% ({}/{})", - progress, successful_jobs, total_jobs + progress, statistics.successful, total_jobs ); - println!("Failed: {}", failed_jobs); + if verbose { + println!("In progress: {}", statistics.in_progress); + println!("Queued: {}", statistics.in_progress); + println!("Successful: {}", statistics.in_progress); + } + println!("Failed: {}", statistics.failed); } -async fn get_one_batch_progress(l1_batches_numbers: Vec) -> anyhow::Result<()> { +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; let prover_connection_pool = ConnectionPool::::builder( @@ -44,31 +48,16 @@ async fn get_one_batch_progress(l1_batches_numbers: Vec) -> anyho let mut conn = prover_connection_pool.connection().await.unwrap(); let stats = conn .fri_prover_jobs_dal() - .get_prover_jobs_stats_for_batch(l1_batches_numbers) + .get_prover_jobs_stats_for_batch(args.batch) .await; + if stats.len() > 0 { + for row in &stats { + let (l1_batch_number, statistics) = row; - for row in 
&stats { - let (l1_batch_number, statistics) = row; - let total_jobs = - statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; - pretty_print_job_status( - l1_batch_number, - total_jobs, - statistics.successful, - statistics.failed, - ) - } - Ok(()) -} - -async fn get_all_batches_progress() -> anyhow::Result<()> { - Ok(()) -} -pub(crate) async fn run(args: Args) -> anyhow::Result<()> { - println!("{:?}", args.proof); - if let Some(l1_batch_number) = args.proof { - get_one_batch_progress(l1_batch_number).await + pretty_print_job_status(l1_batch_number, statistics, args.verbose) + } } else { - get_all_batches_progress().await + println!("No batches found.") } + Ok(()) } From c9e6e5e9262d35dca538e0f5bbb1025d6cb5b2c2 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 11:12:34 -0300 Subject: [PATCH 12/65] change command name --- prover/prover_cli/src/cli.rs | 7 +++---- .../src/commands/{progress.rs => jobs_status.rs} | 0 prover/prover_cli/src/commands/mod.rs | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) rename prover/prover_cli/src/commands/{progress.rs => jobs_status.rs} (100%) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index b72731a9f646..8105cccdd6ea 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_file_info, progress}; +use crate::commands::{get_file_info, jobs_status}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,15 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), - Progress(progress::Args), + StatusJobs(jobs_status::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::Progress(args) => progress::run(args).await?, + ProverCommand::StatusJobs(args) => jobs_status::run(args).await?, }; - Ok(()) } diff --git a/prover/prover_cli/src/commands/progress.rs b/prover/prover_cli/src/commands/jobs_status.rs similarity index 100% rename from prover/prover_cli/src/commands/progress.rs rename to prover/prover_cli/src/commands/jobs_status.rs diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 027cee560ad1..57da8f98a9c9 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1,2 @@ pub(crate) mod get_file_info; -pub(crate) mod progress; +pub(crate) mod jobs_status; From cdbb46775e702ff03cf602208c3f661d36927764 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 11:18:01 -0300 Subject: [PATCH 13/65] fmt --- prover/prover_cli/src/cli.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index c8c96cc0dad0..aae354d09eab 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,7 @@ -use crate::commands::{get_file_info, jobs_status}; use clap::{command, Parser, Subcommand}; +use crate::commands::{get_file_info, jobs_status}; + pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); #[derive(Parser)] From d1f3994ab7beacae3aff51486d53892e12a7f784 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 11:18:37 -0300 Subject: [PATCH 14/65] add sqlx query json --- ...8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json | 34 +++++++++++++++++++ 1 file changed, 34 
insertions(+) create mode 100644 prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json diff --git a/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json b/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json new file mode 100644 index 000000000000..b65a594a8ed2 --- /dev/null +++ b/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n status\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "status!", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8Array" + ] + }, + "nullable": [ + null, + false, + false + ] + }, + "hash": "8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9" +} From fff3910a6b3f3558836abc8d73790b5650915522 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 12:36:37 -0300 Subject: [PATCH 15/65] update README --- prover/prover_cli/README.md | 53 ++++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 25bbe95fc4d5..fcc85bc87992 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -1,14 +1,59 @@ # CLI to better understand and debug provers -For now, it has only one command 'file-info' +## Usage +> Note: +> For now, its necesary to use the 'zk f' tool to set up the environment. +> The main command will later be changed to `pli`. +```bash +Usage: zk f cargo run --release -- + +Commands: + file-info + status-jobs + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help + -V, --version Print version +``` +### Status-jobs + +You can get the progress for some batch proof, for a bunch of batches the `status-jobs` command: + +```bash +# Displays the proof progress of the batch 1 + zk f cargo run -- status-jobs --batch 1 +# Displays the proof progress of the batches 1 and 2 + zk f cargo run -- status-jobs --batch 1 2 +# Displays the proof progress of the batch 3, with additional information + zk f cargo run -- status-jobs --batch 3 --verbose ``` + +Example: + +```bash +$ zk f cargo run -- status-jobs --batch 1 --verbose + +Batch number: 1 +Progress: 34.88% (45/129) +In progress: 1 +Queued: 83 +Successful: 45 +Failed: 0 +``` + +### File-Info + +Displays the information about a given file: + +```bash cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin ``` Example outputs: -``` +```bash L1 proof AUX info: L1 msg linear hash: [163, 243, 172, 16, 189, 59, 100, 227, 249, 46, 226, 220, 82, 135, 213, 208, 221, 228, 49, 46, 121, 136, 78, 163, 15, 155, 199, 82, 64, 24, 172, 198] @@ -18,7 +63,7 @@ AUX info: Inputs: [Fr(0x00000000775db828700e0ebbe0384f8a017598a271dfb6c96ebb2baf22a7a572)] ``` -``` +```bash == Circuit == Type: basic. 
Id: 1 (Scheduler) Geometry: CSGeometry { num_columns_under_copy_permutation: 130, num_witness_columns: 0, num_constant_columns: 4, max_allowed_constraint_degree: 8 } @@ -34,4 +79,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` +``` \ No newline at end of file From ef906b93587453f0307164ccd4290ecb00c92006 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 12:37:27 -0300 Subject: [PATCH 16/65] zk fmt --- prover/prover_cli/README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index fcc85bc87992..890538dbf510 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -1,9 +1,9 @@ # CLI to better understand and debug provers ## Usage -> Note: -> For now, its necesary to use the 'zk f' tool to set up the environment. -> The main command will later be changed to `pli`. + +> Note: For now, its necesary to use the 'zk f' tool to set up the environment. The main command will later be changed +> to `pli`. ```bash Usage: zk f cargo run --release -- @@ -17,6 +17,7 @@ Options: -h, --help Print help -V, --version Print version ``` + ### Status-jobs You can get the progress for some batch proof, for a bunch of batches the `status-jobs` command: @@ -79,4 +80,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` \ No newline at end of file +``` From b3051b2b4c2f0c19f99a414a5faac854b0e298a4 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 12:44:23 -0300 Subject: [PATCH 17/65] remove eol --- prover/prover_cli/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index 61a2fad3be13..b54bd0a0d448 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -28,4 +28,4 @@ zksync_basic_types.workspace = true zksync_types.workspace = true zksync_prover_fri_types.workspace = true zksync_prover_interface.workspace = true -prover_dal.workspace = true +prover_dal.workspace = true \ No newline at end of file From ee9d35bf3f77897d1175594cb570d87b217fd1c9 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:33:34 -0300 Subject: [PATCH 18/65] Move jobs cmd to status cmd module --- .../{jobs_status.rs => status/jobs.rs} | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) rename prover/prover_cli/src/commands/{jobs_status.rs => status/jobs.rs} (100%) diff --git a/prover/prover_cli/src/commands/jobs_status.rs b/prover/prover_cli/src/commands/status/jobs.rs similarity index 100% rename from prover/prover_cli/src/commands/jobs_status.rs rename to prover/prover_cli/src/commands/status/jobs.rs index 54a5bf045983..01205964752b 100644 --- a/prover/prover_cli/src/commands/jobs_status.rs +++ b/prover/prover_cli/src/commands/status/jobs.rs @@ -13,27 +13,6 @@ pub(crate) struct Args { verbose: bool, } -fn pretty_print_job_status( - l1_batch_number: &L1BatchNumber, - statistics: &JobCountStatistics, - verbose: bool, -) { - let total_jobs = - 
statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; - let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; - println!("Batch number: {}", l1_batch_number); - println!( - "Progress: {:.2}% ({}/{})", - progress, statistics.successful, total_jobs - ); - if verbose { - println!("In progress: {}", statistics.in_progress); - println!("Queued: {}", statistics.in_progress); - println!("Successful: {}", statistics.in_progress); - } - println!("Failed: {}", statistics.failed); -} - pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; @@ -61,3 +40,24 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { } Ok(()) } + +fn pretty_print_job_status( + l1_batch_number: &L1BatchNumber, + statistics: &JobCountStatistics, + verbose: bool, +) { + let total_jobs = + statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; + let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; + println!("Batch number: {}", l1_batch_number); + println!( + "Progress: {:.2}% ({}/{})", + progress, statistics.successful, total_jobs + ); + if verbose { + println!("In progress: {}", statistics.in_progress); + println!("Queued: {}", statistics.in_progress); + println!("Successful: {}", statistics.in_progress); + } + println!("Failed: {}", statistics.failed); +} From 82faf32849c2904f7e5162c561731476cd1de9ab Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:34:23 -0300 Subject: [PATCH 19/65] Refactor status cmd --- prover/prover_cli/src/cli.rs | 7 ++++--- prover/prover_cli/src/commands/mod.rs | 10 +++++++++- prover/prover_cli/src/commands/status/mod.rs | 9 +++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 prover/prover_cli/src/commands/status/mod.rs diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index aae354d09eab..cafdd2dff99c 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_file_info, jobs_status}; +use crate::commands::{self, get_file_info, status}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,14 +14,15 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), - StatusJobs(jobs_status::Args), + #[command(subcommand)] + Status(commands::StatusCommand), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::StatusJobs(args) => jobs_status::run(args).await?, + ProverCommand::Status(status_cmd) => status::run(status_cmd).await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 57da8f98a9c9..e53bb47fdc34 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1,10 @@ +use clap::Subcommand; +use status::jobs; + pub(crate) mod get_file_info; -pub(crate) mod jobs_status; +pub(crate) mod status; + +#[derive(Subcommand)] +pub(crate) enum StatusCommand { + Jobs(jobs::Args), +} diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/prover_cli/src/commands/status/mod.rs new file mode 100644 index 000000000000..761bf53c57a0 --- /dev/null +++ b/prover/prover_cli/src/commands/status/mod.rs @@ -0,0 +1,9 @@ +use super::StatusCommand; + 
+pub(crate) mod jobs; + +pub(crate) async fn run(status_cmd: StatusCommand) -> anyhow::Result<()> { + match status_cmd { + StatusCommand::Jobs(args) => jobs::run(args).await, + } +} From aa448645ace357a4be504695cde5735e7327aff1 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:34:35 -0300 Subject: [PATCH 20/65] Update Cargo.lock --- prover/Cargo.lock | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index edaf63a367ac..7262bd7ecdaa 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -1756,15 +1756,11 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" dependencies = [ - "humantime", - "is-terminal", "humantime", "is-terminal", "log", "regex", "termcolor", - "regex", - "termcolor", ] [[package]] @@ -1875,16 +1871,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "eyre" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", -] - [[package]] name = "fastrand" version = "2.0.1" @@ -2731,12 +2717,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - [[package]] name = "indexmap" version = "1.9.3" @@ -4359,11 +4339,9 @@ dependencies = [ "clap 4.4.6", "colored", "env_logger 0.10.2", - "eyre", "hex", "log", "prover_dal", - "sqlx", "tokio", "tracing", "tracing-subscriber", From 3fe32b6e164a54528dca3cb4034a73c978a62d18 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:40:30 -0300 Subject: [PATCH 21/65] zk fmt --- prover/prover_cli/src/main.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prover/prover_cli/src/main.rs b/prover/prover_cli/src/main.rs index f2a7dd710267..a1060476be34 100644 --- a/prover/prover_cli/src/main.rs +++ b/prover/prover_cli/src/main.rs @@ -3,6 +3,8 @@ use prover_cli::cli; #[tokio::main] async fn main() { env_logger::builder() + .filter_module("zksync_db_connection::connection_pool", log::LevelFilter::Off) + .filter_module("sqlx::query", log::LevelFilter::Off) .filter_level(log::LevelFilter::Debug) .init(); From 9977e950a62b6efba95895b53d806b2ebba3f2dc Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 17 Apr 2024 21:40:32 -0300 Subject: [PATCH 22/65] Refactor --- prover/prover_cli/src/cli.rs | 4 ++-- prover/prover_cli/src/commands/mod.rs | 8 +------- .../src/commands/status/{jobs.rs => batch.rs} | 8 ++++++-- prover/prover_cli/src/commands/status/mod.rs | 17 ++++++++++++----- prover/prover_cli/src/main.rs | 12 +++++++++--- 5 files changed, 30 insertions(+), 19 deletions(-) rename prover/prover_cli/src/commands/status/{jobs.rs => batch.rs} (92%) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index cafdd2dff99c..4c01c132cea5 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{self, get_file_info, status}; +use crate::commands::{self, get_file_info}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -22,7 +22,7 @@ pub async fn start() -> 
anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::Status(status_cmd) => status::run(status_cmd).await?, + ProverCommand::Status(cmd) => cmd.run().await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index e53bb47fdc34..e02f4bfb1bae 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,10 +1,4 @@ -use clap::Subcommand; -use status::jobs; - pub(crate) mod get_file_info; pub(crate) mod status; -#[derive(Subcommand)] -pub(crate) enum StatusCommand { - Jobs(jobs::Args), -} +pub(crate) use status::StatusCommand; diff --git a/prover/prover_cli/src/commands/status/jobs.rs b/prover/prover_cli/src/commands/status/batch.rs similarity index 92% rename from prover/prover_cli/src/commands/status/jobs.rs rename to prover/prover_cli/src/commands/status/batch.rs index 01205964752b..f5977ad34780 100644 --- a/prover/prover_cli/src/commands/status/jobs.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,4 +1,4 @@ -use anyhow::Context as _; +use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_basic_types::{prover_dal::JobCountStatistics, L1BatchNumber}; @@ -6,7 +6,7 @@ use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; #[derive(ClapArgs)] -pub(crate) struct Args { +pub struct Args { #[clap(short, long, num_args = 0..)] batch: Vec, #[clap(short, long, default_value("false"))] @@ -14,6 +14,10 @@ pub(crate) struct Args { } pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + ensure!( + !args.batch.is_empty(), + "At least one batch number should be provided" + ); let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; let prover_connection_pool = ConnectionPool::::builder( diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/prover_cli/src/commands/status/mod.rs index 761bf53c57a0..431b1060d60d 100644 --- a/prover/prover_cli/src/commands/status/mod.rs +++ b/prover/prover_cli/src/commands/status/mod.rs @@ -1,9 +1,16 @@ -use super::StatusCommand; +use clap::Subcommand; -pub(crate) mod jobs; +pub(crate) mod batch; -pub(crate) async fn run(status_cmd: StatusCommand) -> anyhow::Result<()> { - match status_cmd { - StatusCommand::Jobs(args) => jobs::run(args).await, +#[derive(Subcommand)] +pub enum StatusCommand { + Batch(batch::Args), +} + +impl StatusCommand { + pub(crate) async fn run(self) -> anyhow::Result<()> { + match self { + StatusCommand::Batch(args) => batch::run(args).await, + } } } diff --git a/prover/prover_cli/src/main.rs b/prover/prover_cli/src/main.rs index a1060476be34..b979a36ed508 100644 --- a/prover/prover_cli/src/main.rs +++ b/prover/prover_cli/src/main.rs @@ -3,10 +3,16 @@ use prover_cli::cli; #[tokio::main] async fn main() { env_logger::builder() - .filter_module("zksync_db_connection::connection_pool", log::LevelFilter::Off) - .filter_module("sqlx::query", log::LevelFilter::Off) + .filter_module("zksync_db_connection", log::LevelFilter::Off) + .filter_module("sqlx", log::LevelFilter::Off) .filter_level(log::LevelFilter::Debug) .init(); - cli::start().await.unwrap(); + match cli::start().await { + Ok(_) => {} + Err(err) => { + log::error!("{err:?}"); + std::process::exit(1); + } + } } From 0120a09319bff428eaa379158f987a021f7312a1 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 19 Apr 2024 10:50:32 -0300 Subject: 
[PATCH 23/65] fix typo --- prover/prover_cli/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 890538dbf510..99869b36bf25 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -2,7 +2,7 @@ ## Usage -> Note: For now, its necesary to use the 'zk f' tool to set up the environment. The main command will later be changed +> Note: For now, its necessary to use the 'zk f' tool to set up the environment. The main command will later be changed > to `pli`. ```bash From 9a11a94b9c067a373372af2eb5f84850b6425796 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 19 Apr 2024 17:34:22 -0300 Subject: [PATCH 24/65] modify query to include aggregation_round --- .../prover_cli/src/commands/status/batch.rs | 7 ++--- ...76096de57cdba25831f86c1428081ca0a14f.json} | 10 +++++-- prover/prover_dal/src/fri_prover_dal.rs | 26 ++++++++++++++----- 3 files changed, 31 insertions(+), 12 deletions(-) rename prover/prover_dal/.sqlx/{query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json => query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json} (50%) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index f5977ad34780..a6fe4e95a0f7 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -35,9 +35,10 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { .await; if stats.len() > 0 { for row in &stats { - let (l1_batch_number, statistics) = row; - - pretty_print_job_status(l1_batch_number, statistics, args.verbose) + let ((l1_batch_number, aggregation_round), statistics) = row; + if *aggregation_round == 0 { + pretty_print_job_status(l1_batch_number, statistics, args.verbose) + } } } else { println!("No batches found.") diff --git a/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json b/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json similarity index 50% rename from prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json rename to prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json index b65a594a8ed2..b578881deeb1 100644 --- a/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json +++ b/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n status\n ", + "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n aggregation_round AS \"aggregation_round!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n aggregation_round,\n status\n ", "describe": { "columns": [ { @@ -15,6 +15,11 @@ }, { "ordinal": 2, + "name": "aggregation_round!", + "type_info": "Int2" + }, + { + "ordinal": 3, "name": "status!", "type_info": "Text" } @@ -27,8 +32,9 @@ "nullable": [ null, false, + false, false ] }, - "hash": "8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9" + "hash": 
"676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f" } diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index ab4f3e131753..88b4e7f9def5 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -598,13 +598,14 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, l1_batches_numbers: Vec, - ) -> HashMap { + ) -> HashMap<(L1BatchNumber, u8), JobCountStatistics> { { sqlx::query!( r#" SELECT COUNT(*) AS "count!", l1_batch_number AS "l1_batch_number!", + aggregation_round AS "aggregation_round!", status AS "status!" FROM prover_jobs_fri @@ -612,6 +613,7 @@ impl FriProverDal<'_, '_> { l1_batch_number = ANY ($1) GROUP BY l1_batch_number, + aggregation_round, status "#, &l1_batches_numbers @@ -623,18 +625,28 @@ impl FriProverDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number, row.status, row.count as usize)) + .map(|row| { + ( + row.l1_batch_number, + row.aggregation_round, + row.status, + row.count as usize, + ) + }) .fold( HashMap::new(), - |mut acc, (l1_batch_number, status, value)| { - let stats = acc.entry(L1BatchNumber(l1_batch_number as u32)).or_insert( - JobCountStatistics { + |mut acc, (l1_batch_number, aggregation_round, status, value)| { + let stats = acc + .entry(( + L1BatchNumber(l1_batch_number as u32), + aggregation_round as u8, + )) + .or_insert(JobCountStatistics { queued: 0, in_progress: 0, failed: 0, successful: 0, - }, - ); + }); match status.as_ref() { "queued" => stats.queued = value, "in_progress" => stats.in_progress = value, From 1c92e9578bf800edf6db2f5d3a19a4de9df6f727 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 24 Apr 2024 15:34:45 -0300 Subject: [PATCH 25/65] change query return type --- prover/prover_dal/src/fri_prover_dal.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 88b4e7f9def5..0639d33e144a 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -598,7 +598,7 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, l1_batches_numbers: Vec, - ) -> HashMap<(L1BatchNumber, u8), JobCountStatistics> { + ) -> HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics> { { sqlx::query!( r#" @@ -639,7 +639,7 @@ impl FriProverDal<'_, '_> { let stats = acc .entry(( L1BatchNumber(l1_batch_number as u32), - aggregation_round as u8, + AggregationRound::from(aggregation_round as u8), )) .or_insert(JobCountStatistics { queued: 0, From 05e163646639b859a74fba69480b5ab1cf8fe7e7 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 15:37:46 -0300 Subject: [PATCH 26/65] Update Cargo files --- prover/Cargo.lock | 1 + prover/prover_cli/Cargo.toml | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 031791106d83..e690f7ddf1fe 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -4363,6 +4363,7 @@ dependencies = [ "hex", "log", "prover_dal", + "strum", "tokio", "tracing", "tracing-subscriber", diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index b54bd0a0d448..a09d012f5b4a 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -13,12 +13,12 @@ categories.workspace = true tokio = { version = "1", features = ["rt-multi-thread", "macros"] } env_logger = "0.10" log = "0.4" +colored = "2.1.0" 
clap = { workspace = true, features = ["derive"] }
 tracing.workspace = true
 tracing-subscriber = { workspace = true, features = ["env-filter"] }
 bincode.workspace = true
-colored.workspace = true
 hex.workspace = true
 anyhow.workspace = true
 zksync_config.workspace = true
@@ -28,4 +28,5 @@ zksync_basic_types.workspace = true
 zksync_types.workspace = true
 zksync_prover_fri_types.workspace = true
 zksync_prover_interface.workspace = true
-prover_dal.workspace = true
\ No newline at end of file
+prover_dal.workspace = true
+strum.workspace = true

From 58d47ad72c56d87e37d83c0cf0891720437f2baf Mon Sep 17 00:00:00 2001
From: Ivan Litteri
Date: Wed, 24 Apr 2024 15:38:58 -0300
Subject: [PATCH 27/65] Settle status cmd baseline

---
 .../prover_cli/src/commands/status/batch.rs   | 74 +++++++------------
 1 file changed, 26 insertions(+), 48 deletions(-)

diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs
index a6fe4e95a0f7..cd3a97f420b7 100644
--- a/prover/prover_cli/src/commands/status/batch.rs
+++ b/prover/prover_cli/src/commands/status/batch.rs
@@ -1,68 +1,46 @@
+use super::utils::BatchData;
+use crate::commands::status::utils::postgres_config;
 use anyhow::{ensure, Context as _};
 use clap::Args as ClapArgs;
-use prover_dal::{ConnectionPool, Prover, ProverDal};
-use zksync_basic_types::{prover_dal::JobCountStatistics, L1BatchNumber};
-use zksync_config::PostgresConfig;
-use zksync_env_config::FromEnv;
+
+use prover_dal::{ConnectionPool, Prover};
+use zksync_types::L1BatchNumber;
 
 #[derive(ClapArgs)]
 pub struct Args {
-    #[clap(short, long, num_args = 0..)]
-    batch: Vec<L1BatchNumber>,
+    #[clap(short = 'n', num_args = 1..)]
+    batches: Vec<L1BatchNumber>,
     #[clap(short, long, default_value("false"))]
     verbose: bool,
 }
 
 pub(crate) async fn run(args: Args) -> anyhow::Result<()> {
     ensure!(
-        !args.batch.is_empty(),
+        !args.batches.is_empty(),
         "At least one batch number should be provided"
     );
-    let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?;
-    let prover_connection_pool = ConnectionPool::<Prover>::builder(
-        postgres_config.prover_url()?,
-        postgres_config.max_connections()?,
-    )
-    .build()
-    .await
-    .context("failed to build a prover_connection_pool")?;
+    let batches_data = get_batches_data(args.batches).await?;
 
-    let mut conn = prover_connection_pool.connection().await.unwrap();
-    let stats = conn
-        .fri_prover_jobs_dal()
-        .get_prover_jobs_stats_for_batch(args.batch)
-        .await;
-    if stats.len() > 0 {
-        for row in &stats {
-            let ((l1_batch_number, aggregation_round), statistics) = row;
-            if *aggregation_round == 0 {
-                pretty_print_job_status(l1_batch_number, statistics, args.verbose)
-            }
-        }
-    } else {
-        println!("No batches found.")
+    for batch_data in batches_data {
+        println!("{batch_data:?}");
     }
+
     Ok(())
 }
 
-fn pretty_print_job_status(
-    l1_batch_number: &L1BatchNumber,
-    statistics: &JobCountStatistics,
-    verbose: bool,
-) {
-    let total_jobs =
-        statistics.queued + statistics.in_progress + statistics.failed + statistics.successful;
-    let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0;
-    println!("Batch number: {}", l1_batch_number);
-    println!(
-        "Progress: {:.2}% ({}/{})",
-        progress, statistics.successful, total_jobs
-    );
-    if verbose {
-        println!("In progress: {}", statistics.in_progress);
-        println!("Queued: {}", statistics.in_progress);
-        println!("Successful: {}", statistics.in_progress);
-    }
-    println!("Failed: {}", statistics.failed);
+async fn get_batches_data(_batches: Vec<L1BatchNumber>) -> anyhow::Result<Vec<BatchData>> {
+    let config = postgres_config()?;
+
+    let prover_connection_pool =
+        ConnectionPool::<Prover>::builder(config.prover_url()?, config.max_connections()?)
+            .build()
+            .await
+            .context("failed to build a prover_connection_pool")?;
+
+    let _conn = prover_connection_pool.connection().await.unwrap();
+
+    // Queries here...
+
+    Ok(vec![BatchData::default()])
 }

From f7787597afb90b5b0fc4c491c507eedfd4d95d5a Mon Sep 17 00:00:00 2001
From: Ivan Litteri
Date: Wed, 24 Apr 2024 15:39:43 -0300
Subject: [PATCH 28/65] Add status cmd utils

---
 prover/prover_cli/src/commands/status/mod.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/prover_cli/src/commands/status/mod.rs
index 431b1060d60d..142f9e1feea0 100644
--- a/prover/prover_cli/src/commands/status/mod.rs
+++ b/prover/prover_cli/src/commands/status/mod.rs
@@ -1,6 +1,7 @@
 use clap::Subcommand;
 
 pub(crate) mod batch;
+mod utils;
 
 #[derive(Subcommand)]
 pub enum StatusCommand {

From e945df2a0ec50aebc8a35144760a2325ce15716d Mon Sep 17 00:00:00 2001
From: Ivan Litteri
Date: Wed, 24 Apr 2024 16:41:46 -0300
Subject: [PATCH 29/65] Add status utils

---
 .../prover_cli/src/commands/status/utils.rs   | 137 ++++++++++++++++++
 1 file changed, 137 insertions(+)
 create mode 100644 prover/prover_cli/src/commands/status/utils.rs

diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs
new file mode 100644
index 000000000000..e499489620ca
--- /dev/null
+++ b/prover/prover_cli/src/commands/status/utils.rs
@@ -0,0 +1,137 @@
+use std::{collections::HashMap, fmt::Debug};
+
+use colored::*;
+use strum::{Display, EnumString};
+use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics};
+use zksync_config::PostgresConfig;
+use zksync_env_config::FromEnv;
+use zksync_types::L1BatchNumber;
+
+pub fn postgres_config() -> anyhow::Result<PostgresConfig> {
+    Ok(PostgresConfig::from_env()?)
+} + +pub struct BatchData { + pub batch_number: L1BatchNumber, + pub basic_witness_generator: Task, + pub leaf_witness_generator: Task, + pub node_witness_generator: Task, + pub recursion_tip: Task, + pub scheduler: Task, + pub compressor: Task, +} + +impl Debug for BatchData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!( + f, + "== {} ==", + format!("Batch {} Status", self.batch_number).bold() + )?; + writeln!(f)?; + writeln!(f, "= {} =", format!("Proving Stages").bold())?; + writeln!(f, "{:?}", self.basic_witness_generator)?; + writeln!(f, "{:?}", self.leaf_witness_generator)?; + writeln!(f, "{:?}", self.node_witness_generator)?; + writeln!(f, "{:?}", self.recursion_tip)?; + writeln!(f, "{:?}", self.scheduler)?; + writeln!(f, "{:?}", self.compressor) + } +} + +impl Default for BatchData { + fn default() -> Self { + BatchData { + batch_number: L1BatchNumber::default(), + basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), + leaf_witness_generator: Task::LeafWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_0_prover_jobs_data: ProverJobsData::default(), + }, + node_witness_generator: Task::NodeWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_1_prover_jobs_data: ProverJobsData::default(), + }, + recursion_tip: Task::RecursionTip { + status: TaskStatus::WaitingForProofs, + aggregation_round_2_prover_jobs_data: ProverJobsData::default(), + }, + scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), + compressor: Task::Compressor(TaskStatus::WaitingForProofs), + } + } +} + +#[derive(Debug, EnumString, Clone, Display)] +pub enum TaskStatus { + /// A task is considered queued when all of its jobs is queued. + #[strum(to_string = "Queued 📥")] + Queued, + /// A task is considered in progress when at least one of its jobs differs in its status. + #[strum(to_string = "In Progress ⌛️")] + InProgress, + /// A task is considered successful when all of its jobs were processed successfully. + #[strum(to_string = "Successful ✅")] + Successful, + /// A task is considered waiting for proofs when all of its jobs are waiting for proofs. + #[strum(to_string = "Waiting for Proof ⏱️")] + WaitingForProofs, + /// A task is considered stuck when at least one of its jobs is stuck. + #[strum(to_string = "Stuck 🛑")] + Stuck, +} + +impl Default for TaskStatus { + fn default() -> Self { + TaskStatus::Queued + } +} + +impl Copy for TaskStatus {} + +type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; + +#[derive(EnumString, Clone, Display)] +pub enum Task { + #[strum(to_string = "Basic Witness Generator")] + BasicWitnessGenerator(TaskStatus), + #[strum(to_string = "Leaf Witness Generator")] + LeafWitnessGenerator { + status: TaskStatus, + aggregation_round_0_prover_jobs_data: ProverJobsData, + }, + #[strum(to_string = "Node Witness Generator")] + NodeWitnessGenerator { + status: TaskStatus, + aggregation_round_1_prover_jobs_data: ProverJobsData, + }, + #[strum(to_string = "Recursion Tip")] + RecursionTip { + status: TaskStatus, + aggregation_round_2_prover_jobs_data: ProverJobsData, + }, + #[strum(to_string = "Scheduler")] + Scheduler(TaskStatus), + #[strum(to_string = "Compressor")] + Compressor(TaskStatus), +} + +impl Task { + fn status(&self) -> TaskStatus { + match self { + Task::BasicWitnessGenerator(status) + | Task::LeafWitnessGenerator { status, .. } + | Task::NodeWitnessGenerator { status, .. } + | Task::RecursionTip { status, .. 
} + | Task::Scheduler(status) + | Task::Compressor(status) => *status, + } + } +} + +impl Debug for Task { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "-- {} --", self.to_string().bold())?; + writeln!(f, "> {}", self.status().to_string()) + } +} From 65853781a9c9195fcf78a5ed61b897fac7424cb0 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 16:42:04 -0300 Subject: [PATCH 30/65] zk fmt --- prover/prover_cli/src/commands/status/batch.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index cd3a97f420b7..30528d35d0ef 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,11 +1,11 @@ -use super::utils::BatchData; -use crate::commands::status::utils::postgres_config; use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; - use prover_dal::{ConnectionPool, Prover}; use zksync_types::L1BatchNumber; +use super::utils::BatchData; +use crate::commands::status::utils::postgres_config; + #[derive(ClapArgs)] pub struct Args { #[clap(short = 'n', num_args = 1..)] From 47df898ade3b1957958eaea44719a77b60e467ea Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 17:18:06 -0300 Subject: [PATCH 31/65] Document enums --- prover/prover_cli/src/commands/status/utils.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index e499489620ca..68e191e9c2b6 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -11,13 +11,21 @@ pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) } +/// Represents the proving data of a batch. pub struct BatchData { + /// The number of the batch. pub batch_number: L1BatchNumber, + /// The basic witness generator data. pub basic_witness_generator: Task, + /// The leaf witness generator data. pub leaf_witness_generator: Task, + /// The node witness generator data. pub node_witness_generator: Task, + /// The recursion tip data. pub recursion_tip: Task, + /// The scheduler data. pub scheduler: Task, + /// The compressor data. pub compressor: Task, } @@ -93,25 +101,31 @@ type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatist #[derive(EnumString, Clone, Display)] pub enum Task { + /// Represents the basic witness generator task and its status. #[strum(to_string = "Basic Witness Generator")] BasicWitnessGenerator(TaskStatus), + /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { status: TaskStatus, aggregation_round_0_prover_jobs_data: ProverJobsData, }, + /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { status: TaskStatus, aggregation_round_1_prover_jobs_data: ProverJobsData, }, + /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] RecursionTip { status: TaskStatus, aggregation_round_2_prover_jobs_data: ProverJobsData, }, + /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] Scheduler(TaskStatus), + /// Represents the compressor task and its status. 
#[strum(to_string = "Compressor")] Compressor(TaskStatus), } From ceae28985e36a09782685e8bf57fa6838bbc1864 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:26:47 -0300 Subject: [PATCH 32/65] Add BatchDataBuilder struct --- .../prover_cli/src/commands/status/utils.rs | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 68e191e9c2b6..a88d3206e7c5 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -11,6 +11,67 @@ pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) } +pub struct BatchDataBuilder { + batch_number: L1BatchNumber, + basic_witness_generator: Task, + leaf_witness_generator: Task, + node_witness_generator: Task, + recursion_tip: Task, + scheduler: Task, + compressor: Task, +} + +impl BatchDataBuilder { + pub fn new(batch_number: L1BatchNumber) -> Self { + BatchDataBuilder { + batch_number, + ..Default::default() + } + } + + pub fn basic_witness_generator(mut self, task: Task) -> Self { + self.basic_witness_generator = task; + self + } + + pub fn leaf_witness_generator(mut self, task: Task) -> Self { + self.leaf_witness_generator = task; + self + } + + pub fn node_witness_generator(mut self, task: Task) -> Self { + self.node_witness_generator = task; + self + } + + pub fn recursion_tip(mut self, task: Task) -> Self { + self.recursion_tip = task; + self + } + + pub fn scheduler(mut self, task: Task) -> Self { + self.scheduler = task; + self + } + + pub fn compressor(mut self, task: Task) -> Self { + self.compressor = task; + self + } + + pub fn build(self) -> BatchData { + BatchData { + batch_number: self.batch_number, + basic_witness_generator: self.basic_witness_generator, + leaf_witness_generator: self.leaf_witness_generator, + node_witness_generator: self.node_witness_generator, + recursion_tip: self.recursion_tip, + scheduler: self.scheduler, + compressor: self.compressor, + } + } +} + /// Represents the proving data of a batch. pub struct BatchData { /// The number of the batch. From f6d7b9acd8e4c0ff80b89e40d187c80a577972e1 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:27:14 -0300 Subject: [PATCH 33/65] Add TaskStatus::Custom enum variant --- prover/prover_cli/src/commands/status/utils.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index a88d3206e7c5..1fba80113ceb 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -133,6 +133,10 @@ impl Default for BatchData { #[derive(Debug, EnumString, Clone, Display)] pub enum TaskStatus { + /// A custom status that can be set manually. + /// Mostly used when a task has singular status. + #[strum(to_string = "{0}")] + Custom(String), /// A task is considered queued when all of its jobs is queued. 
#[strum(to_string = "Queued 📥")] Queued, @@ -152,7 +156,7 @@ pub enum TaskStatus { impl Default for TaskStatus { fn default() -> Self { - TaskStatus::Queued + TaskStatus::WaitingForProofs } } From 8e16d3506e8d20fa67806c9bc5a2b9139a679c0f Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:42:30 -0300 Subject: [PATCH 34/65] Fix BatchDataBuilder --- .../prover_cli/src/commands/status/utils.rs | 90 ++++++++++++++++--- 1 file changed, 76 insertions(+), 14 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 1fba80113ceb..171ad93800dd 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,6 +1,8 @@ use std::{collections::HashMap, fmt::Debug}; +use anyhow::ensure; use colored::*; +use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; @@ -29,34 +31,58 @@ impl BatchDataBuilder { } } - pub fn basic_witness_generator(mut self, task: Task) -> Self { + pub fn basic_witness_generator(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::BasicWitnessGenerator(_)), + "Task should be a basic witness generator" + ); self.basic_witness_generator = task; - self + Ok(self) } - pub fn leaf_witness_generator(mut self, task: Task) -> Self { + pub fn leaf_witness_generator(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::LeafWitnessGenerator { .. }), + "Task should be a leaf witness generator" + ); self.leaf_witness_generator = task; - self + Ok(self) } - pub fn node_witness_generator(mut self, task: Task) -> Self { + pub fn node_witness_generator(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::NodeWitnessGenerator { .. }), + "Task should be a node witness generator" + ); self.node_witness_generator = task; - self + Ok(self) } - pub fn recursion_tip(mut self, task: Task) -> Self { + pub fn recursion_tip(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::RecursionTip { .. 
}), + "Task should be a recursion tip" + ); self.recursion_tip = task; - self + Ok(self) } - pub fn scheduler(mut self, task: Task) -> Self { + pub fn scheduler(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::Scheduler(_)), + "Task should be a scheduler" + ); self.scheduler = task; - self + Ok(self) } - pub fn compressor(mut self, task: Task) -> Self { + pub fn compressor(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::Compressor(_)), + "Task should be a compressor" + ); self.compressor = task; - self + Ok(self) } pub fn build(self) -> BatchData { @@ -72,6 +98,29 @@ impl BatchDataBuilder { } } +impl Default for BatchDataBuilder { + fn default() -> Self { + BatchDataBuilder { + batch_number: L1BatchNumber::default(), + basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), + leaf_witness_generator: Task::LeafWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_0_prover_jobs_data: ProverJobsData::default(), + }, + node_witness_generator: Task::NodeWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_1_prover_jobs_data: ProverJobsData::default(), + }, + recursion_tip: Task::RecursionTip { + status: TaskStatus::WaitingForProofs, + aggregation_round_2_prover_jobs_data: ProverJobsData::default(), + }, + scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), + compressor: Task::Compressor(TaskStatus::WaitingForProofs), + } + } +} + /// Represents the proving data of a batch. pub struct BatchData { /// The number of the batch. @@ -160,7 +209,20 @@ impl Default for TaskStatus { } } -impl Copy for TaskStatus {} +impl From for TaskStatus { + fn from(status: ProofCompressionJobStatus) -> Self { + match status { + ProofCompressionJobStatus::Queued => TaskStatus::Queued, + ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, + ProofCompressionJobStatus::Successful => TaskStatus::Successful, + ProofCompressionJobStatus::Failed => TaskStatus::InProgress, + ProofCompressionJobStatus::SentToServer => { + TaskStatus::Custom("Sent to server 📤".to_owned()) + } + ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + } + } +} type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; @@ -203,7 +265,7 @@ impl Task { | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. 
} | Task::Scheduler(status) - | Task::Compressor(status) => *status, + | Task::Compressor(status) => status.clone(), } } } From a76cb32e79c0f692dcbb8d4076522d589db5dafe Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:44:08 -0300 Subject: [PATCH 35/65] Rollback --- prover/prover_cli/src/commands/status/utils.rs | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 171ad93800dd..2ec63dc8f942 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -2,7 +2,6 @@ use std::{collections::HashMap, fmt::Debug}; use anyhow::ensure; use colored::*; -use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; @@ -209,21 +208,6 @@ impl Default for TaskStatus { } } -impl From for TaskStatus { - fn from(status: ProofCompressionJobStatus) -> Self { - match status { - ProofCompressionJobStatus::Queued => TaskStatus::Queued, - ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, - ProofCompressionJobStatus::Successful => TaskStatus::Successful, - ProofCompressionJobStatus::Failed => TaskStatus::InProgress, - ProofCompressionJobStatus::SentToServer => { - TaskStatus::Custom("Sent to server 📤".to_owned()) - } - ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), - } - } -} - type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; #[derive(EnumString, Clone, Display)] From c460029dd6e39e4af35160f226b4c3eb72a48de9 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:45:17 -0300 Subject: [PATCH 36/65] impl From for TaskStatus --- prover/prover_cli/src/commands/status/utils.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 2ec63dc8f942..171ad93800dd 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -2,6 +2,7 @@ use std::{collections::HashMap, fmt::Debug}; use anyhow::ensure; use colored::*; +use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; @@ -208,6 +209,21 @@ impl Default for TaskStatus { } } +impl From for TaskStatus { + fn from(status: ProofCompressionJobStatus) -> Self { + match status { + ProofCompressionJobStatus::Queued => TaskStatus::Queued, + ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, + ProofCompressionJobStatus::Successful => TaskStatus::Successful, + ProofCompressionJobStatus::Failed => TaskStatus::InProgress, + ProofCompressionJobStatus::SentToServer => { + TaskStatus::Custom("Sent to server 📤".to_owned()) + } + ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + } + } +} + type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; #[derive(EnumString, Clone, Display)] From e76da1fa596ad46fdb3290db92d3f69f355f2f06 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:57:19 -0300 Subject: [PATCH 37/65] Fix TaskStatus::Custom fmt --- prover/prover_cli/src/commands/status/utils.rs | 7 +++++-- 1 
file changed, 5 insertions(+), 2 deletions(-)

diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs
index 171ad93800dd..5367707ee2e1 100644
--- a/prover/prover_cli/src/commands/status/utils.rs
+++ b/prover/prover_cli/src/commands/status/utils.rs
@@ -184,7 +184,6 @@ impl Default for BatchData {
 pub enum TaskStatus {
     /// A custom status that can be set manually.
     /// Mostly used when a task has singular status.
-    #[strum(to_string = "{0}")]
     Custom(String),
     /// A task is considered queued when all of its jobs is queued.
     #[strum(to_string = "Queued 📥")]
@@ -273,6 +272,10 @@ impl Task {
 impl Debug for Task {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         writeln!(f, "-- {} --", self.to_string().bold())?;
-        writeln!(f, "> {}", self.status().to_string())
+        if let TaskStatus::Custom(msg) = self.status() {
+            writeln!(f, "> {msg}")
+        } else {
+            writeln!(f, "> {}", self.status().to_string())
+        }
     }
 }

From 4be214eb4a7444b312adca31257d7143764238ee Mon Sep 17 00:00:00 2001
From: Ivan Litteri
Date: Wed, 24 Apr 2024 21:04:20 -0300
Subject: [PATCH 38/65] Add query for getting proof compression job info for a
 batch

---
 .../src/fri_proof_compressor_dal.rs           | 57 ++++++++++++++++++-
 1 file changed, 56 insertions(+), 1 deletion(-)

diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/prover_dal/src/fri_proof_compressor_dal.rs
index 01231d33b00e..d2910613d87a 100644
--- a/prover/prover_dal/src/fri_proof_compressor_dal.rs
+++ b/prover/prover_dal/src/fri_proof_compressor_dal.rs
@@ -1,7 +1,10 @@
 #![doc = include_str!("../doc/FriProofCompressorDal.md")]
 use std::{collections::HashMap, str::FromStr, time::Duration};
 
-use sqlx::Row;
+use sqlx::{
+    types::chrono::{NaiveDateTime, NaiveTime},
+    Row,
+};
 use strum::{Display, EnumString};
 use zksync_basic_types::{
     prover_dal::{JobCountStatistics, StuckJobs},
@@ -32,6 +35,20 @@ pub enum ProofCompressionJobStatus {
     Skipped,
 }
 
+pub struct ProofCompressionJobInfo {
+    pub l1_batch_number: L1BatchNumber,
+    pub attempts: u32,
+    pub status: ProofCompressionJobStatus,
+    pub fri_proof_blob_url: Option<String>,
+    pub l1_proof_blob_url: Option<String>,
+    pub error: Option<String>,
+    pub created_at: NaiveDateTime,
+    pub updated_at: NaiveDateTime,
+    pub processing_started_at: Option<NaiveDateTime>,
+    pub time_taken: Option<NaiveTime>,
+    pub picked_by: Option<String>,
+}
+
 impl FriProofCompressorDal<'_, '_> {
     pub async fn insert_proof_compression_job(
         &mut self,
@@ -328,4 +345,42 @@ impl FriProofCompressorDal<'_, '_> {
         .collect()
         }
     }
+
+    pub async fn get_proof_compression_job_for_batch(
+        &mut self,
+        block_number: L1BatchNumber,
+    ) -> Option<ProofCompressionJobInfo> {
+        let row = sqlx::query!(
+            r#"
+            SELECT
+                *
+            FROM
+                proof_compression_jobs_fri
+            WHERE
+                l1_batch_number = $1
+            "#,
+            i64::from(block_number.0)
+        )
+        .fetch_optional(self.storage.conn())
+        .await
+        .unwrap();
+
+        if let Some(row) = row {
+            Some(ProofCompressionJobInfo {
+                l1_batch_number: block_number,
+                attempts: row.attempts as u32,
+                status: ProofCompressionJobStatus::from_str(&row.status).unwrap(),
+                fri_proof_blob_url: row.fri_proof_blob_url,
+                l1_proof_blob_url: row.l1_proof_blob_url,
+                error: row.error,
+                created_at: row.created_at,
+                updated_at: row.updated_at,
+                processing_started_at: row.processing_started_at,
+                time_taken: row.time_taken,
+                picked_by: row.picked_by,
+            })
+        } else {
+            None
+        }
+    }
 }

From e9e197f6f0762789255a68f49c1a66b423ea54c7 Mon Sep 17 00:00:00 2001
From: Ivan Litteri
Date: Wed, 24 Apr 2024 21:08:12 -0300
Subject: [PATCH 39/65] Add query for getting proof compression job info for a 
batch --- ...a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json diff --git a/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json b/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json new file mode 100644 index 000000000000..3441906e0cea --- /dev/null +++ b/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "fri_proof_blob_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "l1_proof_blob_url", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 9, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 10, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9" +} From 7c9a4999419bf416a7caafbf42ee44340c79f5b0 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:08:41 -0300 Subject: [PATCH 40/65] Handle proof compression job status for batches --- .../prover_cli/src/commands/status/batch.rs | 34 +++++++++++++++---- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 30528d35d0ef..8a8113cefee8 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,9 +1,11 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ConnectionPool, Prover}; +use prover_dal::{ + fri_proof_compressor_dal::ProofCompressionJobStatus, ConnectionPool, Prover, ProverDal, +}; use zksync_types::L1BatchNumber; -use super::utils::BatchData; +use super::utils::{BatchData, BatchDataBuilder, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -29,7 +31,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { Ok(()) } -async fn get_batches_data(_batches: Vec) -> anyhow::Result> { +async fn get_batches_data(batches: Vec) -> anyhow::Result> { let config = postgres_config()?; let prover_connection_pool = @@ -38,9 +40,29 @@ async fn get_batches_data(_batches: Vec) -> anyhow::Result( + batch_number: L1BatchNumber, + conn: ConnectionPool<'a, Prover>, +) -> anyhow::Result { + conn.fri_proof_compressor_dal() + .get_proof_compression_job_for_batch(L1BatchNumber(0)) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or(TaskStatus::Custom("Compressor job not found 🚫".to_owned())) } From 
17b587555ade6173e8e9fd2176a747227637fe54 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:09:45 -0300 Subject: [PATCH 41/65] Fix get_proof_compression_job_status_for_batch --- prover/prover_cli/src/commands/status/batch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 8a8113cefee8..2e8a9e7eb5b1 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -61,7 +61,7 @@ async fn get_proof_compression_job_status_for_batch<'a>( conn: ConnectionPool<'a, Prover>, ) -> anyhow::Result { conn.fri_proof_compressor_dal() - .get_proof_compression_job_for_batch(L1BatchNumber(0)) + .get_proof_compression_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) .unwrap_or(TaskStatus::Custom("Compressor job not found 🚫".to_owned())) From 3fdc055fb0298c64df30f5f1ddb1c93b173c9414 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:14:59 -0300 Subject: [PATCH 42/65] Remove BatchDataBuilder struct It adds needless complexity to the code --- .../prover_cli/src/commands/status/utils.rs | 109 ------------------ 1 file changed, 109 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 2ec63dc8f942..e6edecc15039 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,6 +1,5 @@ use std::{collections::HashMap, fmt::Debug}; -use anyhow::ensure; use colored::*; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; @@ -12,114 +11,6 @@ pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) } -pub struct BatchDataBuilder { - batch_number: L1BatchNumber, - basic_witness_generator: Task, - leaf_witness_generator: Task, - node_witness_generator: Task, - recursion_tip: Task, - scheduler: Task, - compressor: Task, -} - -impl BatchDataBuilder { - pub fn new(batch_number: L1BatchNumber) -> Self { - BatchDataBuilder { - batch_number, - ..Default::default() - } - } - - pub fn basic_witness_generator(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::BasicWitnessGenerator(_)), - "Task should be a basic witness generator" - ); - self.basic_witness_generator = task; - Ok(self) - } - - pub fn leaf_witness_generator(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::LeafWitnessGenerator { .. }), - "Task should be a leaf witness generator" - ); - self.leaf_witness_generator = task; - Ok(self) - } - - pub fn node_witness_generator(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::NodeWitnessGenerator { .. }), - "Task should be a node witness generator" - ); - self.node_witness_generator = task; - Ok(self) - } - - pub fn recursion_tip(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::RecursionTip { .. 
}), - "Task should be a recursion tip" - ); - self.recursion_tip = task; - Ok(self) - } - - pub fn scheduler(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::Scheduler(_)), - "Task should be a scheduler" - ); - self.scheduler = task; - Ok(self) - } - - pub fn compressor(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::Compressor(_)), - "Task should be a compressor" - ); - self.compressor = task; - Ok(self) - } - - pub fn build(self) -> BatchData { - BatchData { - batch_number: self.batch_number, - basic_witness_generator: self.basic_witness_generator, - leaf_witness_generator: self.leaf_witness_generator, - node_witness_generator: self.node_witness_generator, - recursion_tip: self.recursion_tip, - scheduler: self.scheduler, - compressor: self.compressor, - } - } -} - -impl Default for BatchDataBuilder { - fn default() -> Self { - BatchDataBuilder { - batch_number: L1BatchNumber::default(), - basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), - leaf_witness_generator: Task::LeafWitnessGenerator { - status: TaskStatus::WaitingForProofs, - aggregation_round_0_prover_jobs_data: ProverJobsData::default(), - }, - node_witness_generator: Task::NodeWitnessGenerator { - status: TaskStatus::WaitingForProofs, - aggregation_round_1_prover_jobs_data: ProverJobsData::default(), - }, - recursion_tip: Task::RecursionTip { - status: TaskStatus::WaitingForProofs, - aggregation_round_2_prover_jobs_data: ProverJobsData::default(), - }, - scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), - compressor: Task::Compressor(TaskStatus::WaitingForProofs), - } - } -} - /// Represents the proving data of a batch. pub struct BatchData { /// The number of the batch. From 3093cdeffac92ea70a311d2cb3f1bb64f6b7cb32 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Thu, 25 Apr 2024 10:28:15 -0300 Subject: [PATCH 43/65] Fix --- prover/prover_cli/src/commands/status/batch.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 2e8a9e7eb5b1..57576225b55d 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,11 +1,9 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ - fri_proof_compressor_dal::ProofCompressionJobStatus, ConnectionPool, Prover, ProverDal, -}; +use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::L1BatchNumber; -use super::utils::{BatchData, BatchDataBuilder, Task, TaskStatus}; +use super::utils::{BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -46,7 +44,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( batch_number: L1BatchNumber, - conn: ConnectionPool<'a, Prover>, -) -> anyhow::Result { + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { conn.fri_proof_compressor_dal() .get_proof_compression_job_for_batch(batch_number) .await From fde3be1a1f8ab90019d222b984769afad3e82222 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 25 Apr 2024 10:58:03 -0300 Subject: [PATCH 44/65] Move prover_dal types to basic_types::prover_dal module --- core/lib/basic_types/src/prover_dal.rs | 33 ++++++++++++++- .../prover_cli/src/commands/status/utils.rs | 3 +- .../src/fri_proof_compressor_dal.rs | 40 ++----------------- 3 files changed, 37 insertions(+), 39 deletions(-) diff --git 
a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 41ab439a15fc..5c06e6876574 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -1,7 +1,8 @@ //! Types exposed by the prover DAL for general-purpose use. use std::{net::IpAddr, ops::Add, str::FromStr}; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; +use strum::{Display, EnumString}; use crate::{basic_fri_types::AggregationRound, L1BatchNumber}; @@ -229,3 +230,33 @@ impl FromStr for GpuProverInstanceStatus { } } } + +#[derive(Debug, EnumString, Display)] +pub enum ProofCompressionJobStatus { + #[strum(serialize = "queued")] + Queued, + #[strum(serialize = "in_progress")] + InProgress, + #[strum(serialize = "successful")] + Successful, + #[strum(serialize = "failed")] + Failed, + #[strum(serialize = "sent_to_server")] + SentToServer, + #[strum(serialize = "skipped")] + Skipped, +} + +pub struct ProofCompressionJobInfo { + pub l1_batch_number: L1BatchNumber, + pub attempts: u32, + pub status: ProofCompressionJobStatus, + pub fri_proof_blob_url: Option, + pub l1_proof_blob_url: Option, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub picked_by: Option, +} diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 8704f64c1e07..e844098be567 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,12 +1,11 @@ use std::{collections::HashMap, fmt::Debug}; use colored::*; -use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber}; pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) 
diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/prover_dal/src/fri_proof_compressor_dal.rs index d2910613d87a..c32e5b0716ad 100644 --- a/prover/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/prover_dal/src/fri_proof_compressor_dal.rs @@ -1,13 +1,11 @@ #![doc = include_str!("../doc/FriProofCompressorDal.md")] use std::{collections::HashMap, str::FromStr, time::Duration}; -use sqlx::{ - types::chrono::{NaiveDateTime, NaiveTime}, - Row, -}; -use strum::{Display, EnumString}; +use sqlx::Row; use zksync_basic_types::{ - prover_dal::{JobCountStatistics, StuckJobs}, + prover_dal::{ + JobCountStatistics, ProofCompressionJobInfo, ProofCompressionJobStatus, StuckJobs, + }, L1BatchNumber, }; use zksync_db_connection::connection::Connection; @@ -19,36 +17,6 @@ pub struct FriProofCompressorDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Prover>, } -#[derive(Debug, EnumString, Display)] -pub enum ProofCompressionJobStatus { - #[strum(serialize = "queued")] - Queued, - #[strum(serialize = "in_progress")] - InProgress, - #[strum(serialize = "successful")] - Successful, - #[strum(serialize = "failed")] - Failed, - #[strum(serialize = "sent_to_server")] - SentToServer, - #[strum(serialize = "skipped")] - Skipped, -} - -pub struct ProofCompressionJobInfo { - pub l1_batch_number: L1BatchNumber, - pub attempts: u32, - pub status: ProofCompressionJobStatus, - pub fri_proof_blob_url: Option, - pub l1_proof_blob_url: Option, - pub error: Option, - pub created_at: NaiveDateTime, - pub updated_at: NaiveDateTime, - pub processing_started_at: Option, - pub time_taken: Option, - pub picked_by: Option, -} - impl FriProofCompressorDal<'_, '_> { pub async fn insert_proof_compression_job( &mut self, From efd70acf461760f4abbb355857bff876862c05d0 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 25 Apr 2024 17:39:47 -0300 Subject: [PATCH 45/65] Refactor query --- .../src/fri_proof_compressor_dal.rs | 35 ++++++++----------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/prover_dal/src/fri_proof_compressor_dal.rs index c32e5b0716ad..7016fcd64ddd 100644 --- a/prover/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/prover_dal/src/fri_proof_compressor_dal.rs @@ -318,7 +318,7 @@ impl FriProofCompressorDal<'_, '_> { &mut self, block_number: L1BatchNumber, ) -> Option { - let row = sqlx::query!( + sqlx::query!( r#" SELECT * @@ -331,24 +331,19 @@ impl FriProofCompressorDal<'_, '_> { ) .fetch_optional(self.storage.conn()) .await - .unwrap(); - - if let Some(row) = row { - Some(ProofCompressionJobInfo { - l1_batch_number: block_number, - attempts: row.attempts as u32, - status: ProofCompressionJobStatus::from_str(&row.status).unwrap(), - fri_proof_blob_url: row.fri_proof_blob_url, - l1_proof_blob_url: row.l1_proof_blob_url, - error: row.error, - created_at: row.created_at, - updated_at: row.updated_at, - processing_started_at: row.processing_started_at, - time_taken: row.time_taken, - picked_by: row.picked_by, - }) - } else { - None - } + .unwrap() + .map(|row| ProofCompressionJobInfo { + l1_batch_number: block_number, + attempts: row.attempts as u32, + status: ProofCompressionJobStatus::from_str(&row.status).unwrap(), + fri_proof_blob_url: row.fri_proof_blob_url, + l1_proof_blob_url: row.l1_proof_blob_url, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + picked_by: row.picked_by, + }) } } From 
1830cedeaac954acd95d1ecefdec9354d7d22698 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 25 Apr 2024 18:56:18 -0300 Subject: [PATCH 46/65] add bwg query --- core/lib/basic_types/src/prover_dal.rs | 22 ++++- .../prover_cli/src/commands/status/batch.rs | 32 ++++++- .../prover_cli/src/commands/status/utils.rs | 18 +++- ...e118cabc67b6e507efefb7b69e102f1b43c58.json | 94 +++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 40 +++++++- 5 files changed, 196 insertions(+), 10 deletions(-) create mode 100644 prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 41ab439a15fc..abd36ef1f70f 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -1,9 +1,12 @@ //! Types exposed by the prover DAL for general-purpose use. use std::{net::IpAddr, ops::Add, str::FromStr}; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; -use crate::{basic_fri_types::AggregationRound, L1BatchNumber}; +use crate::{ + basic_fri_types::{AggregationRound, Eip4844Blobs}, + L1BatchNumber, +}; // This currently lives in `zksync_prover_types` -- we don't want a dependency between prover types (`zkevm_test_harness`) and DAL. // This will be gone as part of 1.5.0, when EIP4844 becomes normal jobs, rather than special cased ones. @@ -229,3 +232,18 @@ impl FromStr for GpuProverInstanceStatus { } } } +pub struct BasicWitnessGeneratorJobInfo { + pub l1_batch_number: L1BatchNumber, + pub merkle_tree_paths_blob_url: Option, + pub attempts: u32, + pub status: WitnessJobStatus, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub is_blob_cleaned: Option, + pub protocol_version: Option, + pub picked_by: Option, + pub eip_4844_blobs: Option, +} diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 30528d35d0ef..3fa26dee5d0b 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,9 +1,9 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ConnectionPool, Prover}; +use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::L1BatchNumber; -use super::utils::BatchData; +use super::utils::{BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -29,7 +29,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { Ok(()) } -async fn get_batches_data(_batches: Vec) -> anyhow::Result> { +async fn get_batches_data(batches: Vec) -> anyhow::Result> { let config = postgres_config()?; let prover_connection_pool = @@ -38,9 +38,31 @@ async fn get_batches_data(_batches: Vec) -> anyhow::Result( + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_witness_generator_dal() + .get_basic_witness_generator_job_for_batch(batch_number) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or(TaskStatus::Custom( + "Basic witness generator job not found 🚫".to_owned(), + )) +} diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index e6edecc15039..cdbfc343944d 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ 
-5,7 +5,7 @@ use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::WitnessJobStatus, L1BatchNumber}; pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) @@ -151,3 +151,19 @@ impl Debug for Task { writeln!(f, "> {}", self.status().to_string()) } } + +impl From for TaskStatus { + fn from(status: WitnessJobStatus) -> Self { + match status { + WitnessJobStatus::Queued => TaskStatus::Queued, + WitnessJobStatus::InProgress => TaskStatus::InProgress, + WitnessJobStatus::Successful(_) => TaskStatus::Successful, + WitnessJobStatus::Failed(_) => TaskStatus::InProgress, + WitnessJobStatus::WaitingForArtifacts => { + TaskStatus::Custom("Waiting for Artifacts ⏱️".to_owned()) + } + WitnessJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + WitnessJobStatus::WaitingForProofs => TaskStatus::WaitingForProofs, + } + } +} diff --git a/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json b/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json new file mode 100644 index 000000000000..a7b8d0dc8542 --- /dev/null +++ b/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n witness_inputs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "merkle_tree_paths_blob_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 9, + "name": "is_blob_cleaned", + "type_info": "Bool" + }, + { + "ordinal": 10, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 11, + "name": "picked_by", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "eip_4844_blobs", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + true, + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index d2b58f5f75d9..2662183d22bf 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1,12 +1,13 @@ #![doc = include_str!("../doc/FriWitnessGeneratorDal.md")] -use std::{collections::HashMap, convert::TryFrom, time::Duration}; +use std::{collections::HashMap, convert::TryFrom, str::FromStr, time::Duration}; use sqlx::Row; use zksync_basic_types::{ basic_fri_types::{AggregationRound, Eip4844Blobs}, protocol_version::ProtocolVersionId, prover_dal::{ - JobCountStatistics, LeafAggregationJobMetadata, 
NodeAggregationJobMetadata, StuckJobs, + BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, + NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1151,4 +1152,39 @@ impl FriWitnessGeneratorDal<'_, '_> { .map(|id| ProtocolVersionId::try_from(id as u16).unwrap()) .unwrap() } + + pub async fn get_basic_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + witness_inputs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| BasicWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + merkle_tree_paths_blob_url: row.merkle_tree_paths_blob_url, + attempts: row.attempts as u32, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + is_blob_cleaned: row.is_blob_cleaned, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + eip_4844_blobs: row.eip_4844_blobs.map(|vec_u8| Eip4844Blobs::from(vec_u8)), + }) + } } From bbe5dc3626fc1eb15e6b976ebbfbdb704a760954 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 26 Apr 2024 17:30:47 -0300 Subject: [PATCH 47/65] add status for prover jobs --- core/lib/basic_types/src/prover_dal.rs | 32 ++++- .../prover_cli/src/commands/status/batch.rs | 26 +++- .../prover_cli/src/commands/status/utils.rs | 90 +++++++++--- ...076096de57cdba25831f86c1428081ca0a14f.json | 40 ------ ...d34a5baece02812f8c950fc84d37eeebd33a4.json | 131 ++++++++++++++++++ prover/prover_dal/src/fri_prover_dal.rs | 112 +++++++-------- 6 files changed, 302 insertions(+), 129 deletions(-) delete mode 100644 prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json create mode 100644 prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 10c87e63a7bf..f355932bb48f 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -6,6 +6,7 @@ use strum::{Display, EnumString}; use crate::{ basic_fri_types::{AggregationRound, Eip4844Blobs}, + protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -97,13 +98,13 @@ pub struct JobPosition { pub sequence_number: usize, } -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct ProverJobStatusFailed { pub started_at: DateTime, pub error: String, } -#[derive(Debug)] +#[derive(Debug, PartialEq)] pub struct ProverJobStatusSuccessful { pub started_at: DateTime, pub time_taken: Duration, @@ -118,7 +119,7 @@ impl Default for ProverJobStatusSuccessful { } } -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct ProverJobStatusInProgress { pub started_at: DateTime, } @@ -144,7 +145,7 @@ pub struct WitnessJobStatusFailed { pub error: String, } -#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr)] +#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr, PartialEq)] pub enum ProverJobStatus { #[strum(serialize = "queued")] Queued, @@ -233,6 +234,29 @@ impl FromStr for GpuProverInstanceStatus { } } } + +pub struct ProverJobFriInfo { + pub id: u32, + pub l1_batch_number: L1BatchNumber, + pub circuit_id: u32, + pub circuit_blob_url: String, + pub 
aggregation_round: AggregationRound, + pub sequence_number: u32, + pub status: ProverJobStatus, + pub error: Option, + pub attempts: u8, + pub processing_started_at: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub time_taken: Option, + pub is_blob_cleaned: Option, + pub depth: u32, + pub is_node_final_proof: bool, + pub proof_blob_url: Option, + pub protocol_version: Option, + pub picked_by: Option, +} + pub struct BasicWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, pub merkle_tree_paths_blob_url: Option, diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 84e0c9106209..4b7933f0438c 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,7 +1,7 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; -use zksync_types::L1BatchNumber; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; use super::utils::{BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; @@ -43,9 +43,16 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( + batch_number: L1BatchNumber, + aggation_round: AggregationRound, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_prover_jobs_dal() + .get_prover_jobs_stats_for_batch(batch_number, aggation_round) + .await + .into() +} + async fn get_proof_basic_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 2a13147e696f..3b19bd65df04 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,12 +1,11 @@ -use std::{collections::HashMap, fmt::Debug}; +use std::fmt::Debug; use colored::*; use strum::{Display, EnumString}; -use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; use zksync_types::{ - prover_dal::{ProofCompressionJobStatus, WitnessJobStatus}, + prover_dal::{ProofCompressionJobStatus, ProverJobFriInfo, ProverJobStatus, WitnessJobStatus}, L1BatchNumber, }; @@ -54,18 +53,21 @@ impl Default for BatchData { fn default() -> Self { BatchData { batch_number: L1BatchNumber::default(), - basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), + basic_witness_generator: Task::BasicWitnessGenerator { + status: TaskStatus::WaitingForProofs, + prover_jobs_status: TaskStatus::default(), + }, leaf_witness_generator: Task::LeafWitnessGenerator { status: TaskStatus::WaitingForProofs, - aggregation_round_0_prover_jobs_data: ProverJobsData::default(), + prover_jobs_status: TaskStatus::default(), }, node_witness_generator: Task::NodeWitnessGenerator { status: TaskStatus::WaitingForProofs, - aggregation_round_1_prover_jobs_data: ProverJobsData::default(), + prover_jobs_status: TaskStatus::default(), }, recursion_tip: Task::RecursionTip { status: TaskStatus::WaitingForProofs, - aggregation_round_2_prover_jobs_data: ProverJobsData::default(), + prover_jobs_status: TaskStatus::default(), }, scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), compressor: Task::Compressor(TaskStatus::WaitingForProofs), @@ -101,6 +103,28 @@ impl Default for TaskStatus { } } +impl From> for TaskStatus { + fn from(jobs_vector: 
Vec) -> Self { + if jobs_vector.is_empty() { + return TaskStatus::Custom("No Jobs found ".to_owned()); + } + + if jobs_vector + .iter() + .all(|job| job.status == ProverJobStatus::Queued) + { + return TaskStatus::Queued; + } else if jobs_vector.iter().all(|job| match job.status { + ProverJobStatus::Successful(_) => true, + _ => false, + }) { + return TaskStatus::Successful; + } + + TaskStatus::InProgress + } +} + impl From for TaskStatus { fn from(status: ProofCompressionJobStatus) -> Self { match status { @@ -116,30 +140,31 @@ impl From for TaskStatus { } } -type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; - #[derive(EnumString, Clone, Display)] pub enum Task { /// Represents the basic witness generator task and its status. #[strum(to_string = "Basic Witness Generator")] - BasicWitnessGenerator(TaskStatus), + BasicWitnessGenerator { + status: TaskStatus, + prover_jobs_status: TaskStatus, + }, /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { status: TaskStatus, - aggregation_round_0_prover_jobs_data: ProverJobsData, + prover_jobs_status: TaskStatus, }, /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { status: TaskStatus, - aggregation_round_1_prover_jobs_data: ProverJobsData, + prover_jobs_status: TaskStatus, }, /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] RecursionTip { status: TaskStatus, - aggregation_round_2_prover_jobs_data: ProverJobsData, + prover_jobs_status: TaskStatus, }, /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] @@ -152,7 +177,7 @@ pub enum Task { impl Task { fn status(&self) -> TaskStatus { match self { - Task::BasicWitnessGenerator(status) + Task::BasicWitnessGenerator { status, .. } | Task::LeafWitnessGenerator { status, .. } | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. } @@ -162,14 +187,43 @@ impl Task { } } +impl Task { + fn prover_jobs_status(&self) -> Option { + match self { + Task::BasicWitnessGenerator { + prover_jobs_status, .. + } + | Task::LeafWitnessGenerator { + prover_jobs_status, .. + } + | Task::NodeWitnessGenerator { + prover_jobs_status, .. + } + | Task::RecursionTip { + prover_jobs_status, .. + } => Some(prover_jobs_status.clone()), + Task::Scheduler(_) => None, + Task::Compressor(_) => None, + } + } +} + impl Debug for Task { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "-- {} --", self.to_string().bold())?; - if let TaskStatus::Custom(msg) = self.status() { - writeln!(f, "> {msg}") - } else { - writeln!(f, "> {}", self.status().to_string()) + match self.status() { + TaskStatus::InProgress | TaskStatus::Successful => { + writeln!(f, "> {}", self.status().to_string())?; + if let Some(status) = self.prover_jobs_status() { + writeln!(f, "> {}", status.to_string())?; + } + } + TaskStatus::Queued | TaskStatus::WaitingForProofs | TaskStatus::Stuck => { + writeln!(f, "> {}", self.status().to_string())? 
+ } + TaskStatus::Custom(msg) => writeln!(f, "> {msg}")?, } + Ok(()) } } diff --git a/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json b/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json deleted file mode 100644 index b578881deeb1..000000000000 --- a/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n aggregation_round AS \"aggregation_round!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n aggregation_round,\n status\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "l1_batch_number!", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "aggregation_round!", - "type_info": "Int2" - }, - { - "ordinal": 3, - "name": "status!", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Int8Array" - ] - }, - "nullable": [ - null, - false, - false, - false - ] - }, - "hash": "676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f" -} diff --git a/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json b/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json new file mode 100644 index 000000000000..7ced88426e4d --- /dev/null +++ b/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json @@ -0,0 +1,131 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND aggregation_round = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "circuit_blob_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "aggregation_round", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "sequence_number", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 9, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 12, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 13, + "name": "is_blob_cleaned", + "type_info": "Bool" + }, + { + "ordinal": 14, + "name": "depth", + "type_info": "Int4" + }, + { + "ordinal": 15, + "name": "is_node_final_proof", + "type_info": "Bool" + }, + { + "ordinal": 16, + "name": "proof_blob_url", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 18, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int2" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + false, + true, + false, + false, + true, + true, + false, + false, + true, + true, + true + ] 
+ }, + "hash": "c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4" +} diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 0639d33e144a..a122665853f7 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -1,10 +1,13 @@ #![doc = include_str!("../doc/FriProverDal.md")] -use std::{collections::HashMap, convert::TryFrom, time::Duration}; +use std::{collections::HashMap, convert::TryFrom, str::FromStr, time::Duration}; use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, - prover_dal::{FriProverJobMetadata, JobCountStatistics, StuckJobs, EIP_4844_CIRCUIT_ID}, + prover_dal::{ + FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, + EIP_4844_CIRCUIT_ID, + }, L1BatchNumber, }; use zksync_db_connection::{ @@ -597,66 +600,49 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, - l1_batches_numbers: Vec, - ) -> HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics> { - { - sqlx::query!( - r#" - SELECT - COUNT(*) AS "count!", - l1_batch_number AS "l1_batch_number!", - aggregation_round AS "aggregation_round!", - status AS "status!" - FROM - prover_jobs_fri - WHERE - l1_batch_number = ANY ($1) - GROUP BY - l1_batch_number, - aggregation_round, - status - "#, - &l1_batches_numbers - .into_iter() - .map(|x| i64::from(x.0)) - .collect::>() - ) - .fetch_all(self.storage.conn()) - .await - .unwrap() - .into_iter() - .map(|row| { - ( - row.l1_batch_number, - row.aggregation_round, - row.status, - row.count as usize, - ) - }) - .fold( - HashMap::new(), - |mut acc, (l1_batch_number, aggregation_round, status, value)| { - let stats = acc - .entry(( - L1BatchNumber(l1_batch_number as u32), - AggregationRound::from(aggregation_round as u8), - )) - .or_insert(JobCountStatistics { - queued: 0, - in_progress: 0, - failed: 0, - successful: 0, - }); - match status.as_ref() { - "queued" => stats.queued = value, - "in_progress" => stats.in_progress = value, - "failed" => stats.failed = value, - "successful" => stats.successful = value, - _ => (), - } - acc - }, - ) - } + l1_batch_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) -> Vec { + sqlx::query!( + r#" + SELECT + * + FROM + prover_jobs_fri + WHERE + l1_batch_number = $1 + AND aggregation_round = $2 + "#, + i64::from(l1_batch_number.0), + aggregation_round as i16 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .iter() + .map(|row| ProverJobFriInfo { + id: row.id as u32, + l1_batch_number: l1_batch_number, + circuit_id: row.circuit_id as u32, + circuit_blob_url: row.circuit_blob_url.clone(), + aggregation_round: aggregation_round, + sequence_number: row.sequence_number as u32, + status: ProverJobStatus::from_str(&row.status).unwrap(), + error: row.error.clone(), + attempts: row.attempts as u8, + processing_started_at: row.processing_started_at, + created_at: row.created_at, + updated_at: row.updated_at, + time_taken: row.time_taken, + is_blob_cleaned: row.is_blob_cleaned, + depth: row.depth as u32, + is_node_final_proof: row.is_node_final_proof, + proof_blob_url: row.proof_blob_url.clone(), + protocol_version: row.protocol_version.map(|protocol_version| { + ProtocolVersionId::try_from(protocol_version as u16).unwrap() + }), + picked_by: row.picked_by.clone(), + }) + .collect() } } From 111884bd1db2e33c9c8f56a08cb751bb62d6411c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 
26 Apr 2024 17:55:51 -0300 Subject: [PATCH 48/65] add title for prover jobs --- prover/prover_cli/src/commands/status/utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 3b19bd65df04..c07a4c5c2d5f 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -215,7 +215,7 @@ impl Debug for Task { TaskStatus::InProgress | TaskStatus::Successful => { writeln!(f, "> {}", self.status().to_string())?; if let Some(status) = self.prover_jobs_status() { - writeln!(f, "> {}", status.to_string())?; + writeln!(f, " > Prover Jobs Status: {}", status.to_string())?; } } TaskStatus::Queued | TaskStatus::WaitingForProofs | TaskStatus::Stuck => { From 9fef23d2c3cb4254e843533e1563c68c6099ef1b Mon Sep 17 00:00:00 2001 From: ilitteri Date: Fri, 26 Apr 2024 19:30:43 -0300 Subject: [PATCH 49/65] Refactor --- .../prover_cli/src/commands/status/batch.rs | 26 ++- .../prover_cli/src/commands/status/utils.rs | 194 +++++++++++++----- 2 files changed, 152 insertions(+), 68 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 4b7933f0438c..1b7685344b74 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -3,7 +3,7 @@ use clap::Args as ClapArgs; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; -use super::utils::{BatchData, Task, TaskStatus}; +use super::utils::{AggregationRoundInfo, BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -46,7 +46,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( batch_number: L1BatchNumber, - aggation_round: AggregationRound, + aggregation_round: AggregationRound, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { - conn.fri_prover_jobs_dal() - .get_prover_jobs_stats_for_batch(batch_number, aggation_round) +) -> AggregationRoundInfo { + let status: TaskStatus = conn + .fri_prover_jobs_dal() + .get_prover_jobs_stats_for_batch(batch_number, aggregation_round) .await - .into() + .into(); + + AggregationRoundInfo { + round: AggregationRound::BasicCircuits, + prover_jobs_status: status, + } } async fn get_proof_basic_witness_generator_status_for_batch<'a>( @@ -83,9 +89,7 @@ async fn get_proof_basic_witness_generator_status_for_batch<'a>( .get_basic_witness_generator_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) - .unwrap_or(TaskStatus::Custom( - "Basic witness generator job not found 🚫".to_owned(), - )) + .unwrap_or_default() } async fn get_proof_compression_job_status_for_batch<'a>( @@ -96,5 +100,5 @@ async fn get_proof_compression_job_status_for_batch<'a>( .get_proof_compression_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) - .unwrap_or(TaskStatus::Custom("Compressor job not found 🚫".to_owned())) + .unwrap_or_default() } diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index c07a4c5c2d5f..e2408d9c17a9 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -5,12 +5,13 @@ use strum::{Display, EnumString}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; use zksync_types::{ + basic_fri_types::AggregationRound, 
prover_dal::{ProofCompressionJobStatus, ProverJobFriInfo, ProverJobStatus, WitnessJobStatus}, L1BatchNumber, }; pub fn postgres_config() -> anyhow::Result { - Ok(PostgresConfig::from_env()?) + PostgresConfig::from_env() } /// Represents the proving data of a batch. @@ -39,7 +40,7 @@ impl Debug for BatchData { format!("Batch {} Status", self.batch_number).bold() )?; writeln!(f)?; - writeln!(f, "= {} =", format!("Proving Stages").bold())?; + writeln!(f, "= {} =", "Proving Stages".to_owned().bold())?; writeln!(f, "{:?}", self.basic_witness_generator)?; writeln!(f, "{:?}", self.leaf_witness_generator)?; writeln!(f, "{:?}", self.node_witness_generator)?; @@ -54,28 +55,46 @@ impl Default for BatchData { BatchData { batch_number: L1BatchNumber::default(), basic_witness_generator: Task::BasicWitnessGenerator { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::BasicCircuits, + prover_jobs_status: TaskStatus::default(), + }, }, leaf_witness_generator: Task::LeafWitnessGenerator { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::LeafAggregation, + prover_jobs_status: TaskStatus::default(), + }, }, node_witness_generator: Task::NodeWitnessGenerator { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::NodeAggregation, + prover_jobs_status: TaskStatus::default(), + }, }, recursion_tip: Task::RecursionTip { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::Scheduler, + prover_jobs_status: TaskStatus::default(), + }, }, - scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), - compressor: Task::Compressor(TaskStatus::WaitingForProofs), + scheduler: Task::Scheduler { + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::Scheduler, + prover_jobs_status: TaskStatus::default(), + }, + }, + compressor: Task::Compressor(TaskStatus::JobsNotFound), } } } -#[derive(Debug, EnumString, Clone, Display)] +#[derive(Default, Debug, EnumString, Clone, Display)] pub enum TaskStatus { /// A custom status that can be set manually. /// Mostly used when a task has singular status. @@ -93,35 +112,33 @@ pub enum TaskStatus { #[strum(to_string = "Waiting for Proof ⏱️")] WaitingForProofs, /// A task is considered stuck when at least one of its jobs is stuck. - #[strum(to_string = "Stuck 🛑")] + #[strum(to_string = "Stuck ⛔️")] Stuck, + /// A task has no jobs. + #[default] + #[strum(to_string = "Jobs not found 🚫")] + JobsNotFound, } -impl Default for TaskStatus { - fn default() -> Self { - TaskStatus::WaitingForProofs - } -} - +// This implementation will change to From> for AggregationRoundInfo +// once the --verbose flag is implemented. 
impl From> for TaskStatus { fn from(jobs_vector: Vec) -> Self { if jobs_vector.is_empty() { - return TaskStatus::Custom("No Jobs found ".to_owned()); - } - - if jobs_vector + TaskStatus::JobsNotFound + } else if jobs_vector .iter() - .all(|job| job.status == ProverJobStatus::Queued) + .all(|job| matches!(job.status, ProverJobStatus::Queued)) { - return TaskStatus::Queued; - } else if jobs_vector.iter().all(|job| match job.status { - ProverJobStatus::Successful(_) => true, - _ => false, - }) { - return TaskStatus::Successful; + TaskStatus::Queued + } else if jobs_vector + .iter() + .all(|job| matches!(job.status, ProverJobStatus::InProgress(_))) + { + TaskStatus::Successful + } else { + TaskStatus::InProgress } - - TaskStatus::InProgress } } @@ -146,29 +163,32 @@ pub enum Task { #[strum(to_string = "Basic Witness Generator")] BasicWitnessGenerator { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] RecursionTip { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] - Scheduler(TaskStatus), + Scheduler { + status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, + }, /// Represents the compressor task and its status. #[strum(to_string = "Compressor")] Compressor(TaskStatus), @@ -181,28 +201,68 @@ impl Task { | Task::LeafWitnessGenerator { status, .. } | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. } - | Task::Scheduler(status) + | Task::Scheduler { status, .. } | Task::Compressor(status) => status.clone(), } } -} -impl Task { + fn aggregation_round(&self) -> Option { + match self { + Task::BasicWitnessGenerator { + aggregation_round_info, + .. + } + | Task::LeafWitnessGenerator { + aggregation_round_info, + .. + } + | Task::NodeWitnessGenerator { + aggregation_round_info, + .. + } + | Task::RecursionTip { + aggregation_round_info, + .. + } + | Task::Scheduler { + aggregation_round_info, + .. + } => Some(aggregation_round_info.round), + Task::Compressor(_) => None, + } + } + + /// Returns the status of the prover jobs. + /// If the task is not in progress or successful, returns None. + /// Otherwise, returns the status of the prover jobs if the task + /// has prover jobs. fn prover_jobs_status(&self) -> Option { match self { Task::BasicWitnessGenerator { - prover_jobs_status, .. + status, + aggregation_round_info, } | Task::LeafWitnessGenerator { - prover_jobs_status, .. + status, + aggregation_round_info, } | Task::NodeWitnessGenerator { - prover_jobs_status, .. + status, + aggregation_round_info, } | Task::RecursionTip { - prover_jobs_status, .. 
- } => Some(prover_jobs_status.clone()), - Task::Scheduler(_) => None, + status, + aggregation_round_info, + } + | Task::Scheduler { + status, + aggregation_round_info, + } => match status { + TaskStatus::InProgress | TaskStatus::Successful => { + Some(aggregation_round_info.prover_jobs_status.clone()) + } + _ => None, + }, Task::Compressor(_) => None, } } @@ -210,18 +270,23 @@ impl Task { impl Debug for Task { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!(f, "-- {} --", self.to_string().bold())?; - match self.status() { - TaskStatus::InProgress | TaskStatus::Successful => { - writeln!(f, "> {}", self.status().to_string())?; - if let Some(status) = self.prover_jobs_status() { - writeln!(f, " > Prover Jobs Status: {}", status.to_string())?; - } + if let Some(aggregation_round_number) = self.aggregation_round() { + writeln!( + f, + "-- {} --", + format!("Aggregation Round {}", aggregation_round_number as u8).bold() + )?; + if let TaskStatus::Custom(msg) = self.status() { + writeln!(f, "{}: {}", self.to_string().bold(), msg)?; + } else { + writeln!(f, "{}: {}", self.to_string().bold(), self.status())?; } - TaskStatus::Queued | TaskStatus::WaitingForProofs | TaskStatus::Stuck => { - writeln!(f, "> {}", self.status().to_string())? + if let Some(prover_jobs_status) = self.prover_jobs_status() { + writeln!(f, "> Prover Jobs: {prover_jobs_status}")?; } - TaskStatus::Custom(msg) => writeln!(f, "> {msg}")?, + } else { + writeln!(f, "-- {} --", self.to_string().bold())?; + writeln!(f, "{}", self.status())?; } Ok(()) } @@ -242,3 +307,18 @@ impl From for TaskStatus { } } } + +#[derive(Clone)] +pub struct AggregationRoundInfo { + pub round: AggregationRound, + pub prover_jobs_status: TaskStatus, +} + +impl Default for AggregationRoundInfo { + fn default() -> Self { + AggregationRoundInfo { + round: AggregationRound::BasicCircuits, + prover_jobs_status: TaskStatus::default(), + } + } +} From 01ecd2f9b7ca36ddead16536cfa54da1f943a97a Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 29 Apr 2024 13:20:34 -0300 Subject: [PATCH 50/65] add leaf query --- core/lib/basic_types/src/prover_dal.rs | 19 +++- .../prover_cli/src/commands/status/batch.rs | 22 ++++ ...5d2832571464e74b5fed92cf54617573c84ec.json | 106 ++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 38 ++++++- 4 files changed, 183 insertions(+), 2 deletions(-) create mode 100644 prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index f355932bb48f..7daea8a2205b 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, Eip4844Blobs}, + basic_fri_types::{AggregationRound, CircuitIdRoundTuple, Eip4844Blobs}, protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -273,6 +273,23 @@ pub struct BasicWitnessGeneratorJobInfo { pub eip_4844_blobs: Option, } +pub struct LeafWitnessGeneratorJobInfo { + pub l1_batch_number: L1BatchNumber, + pub circuit_id: u32, + pub closed_form_inputs_blob_url: Option, + pub attempts: u32, + pub status: WitnessJobStatus, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub is_blob_cleaned: Option, + pub number_of_basic_circuits: Option, + 
pub protocol_version: Option, + pub picked_by: Option, +} + #[derive(Debug, EnumString, Display)] pub enum ProofCompressionJobStatus { #[strum(serialize = "queued")] diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 4b7933f0438c..d6f33c023716 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -53,6 +53,15 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( )) } +async fn get_proof_leaf_witness_generator_status_for_batch<'a>( + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_witness_generator_dal() + .get_leaf_witness_generator_job_for_batch(batch_number) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or(TaskStatus::Custom( + "Leaf witness generator job not found 🚫".to_owned(), + )) +} + async fn get_proof_compression_job_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, diff --git a/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json b/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json new file mode 100644 index 000000000000..9e750348decb --- /dev/null +++ b/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json @@ -0,0 +1,106 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n leaf_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "closed_form_inputs_blob_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 9, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 10, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 11, + "name": "is_blob_cleaned", + "type_info": "Bool" + }, + { + "ordinal": 12, + "name": "number_of_basic_circuits", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 14, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 2662183d22bf..30d9bb21c977 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -7,7 +7,7 @@ use zksync_basic_types::{ protocol_version::ProtocolVersionId, prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, - NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, + LeafWitnessGeneratorJobInfo, 
NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1187,4 +1187,40 @@ impl FriWitnessGeneratorDal<'_, '_> { eip_4844_blobs: row.eip_4844_blobs.map(|vec_u8| Eip4844Blobs::from(vec_u8)), }) } + + pub async fn get_leaf_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + leaf_aggregation_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| LeafWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + circuit_id: row.circuit_id as u32, + closed_form_inputs_blob_url: row.closed_form_inputs_blob_url, + attempts: row.attempts as u32, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + is_blob_cleaned: row.is_blob_cleaned, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + number_of_basic_circuits: row.number_of_basic_circuits, + }) + } } From 590c58bbff5576f297e73d0cf836a440f3386f3c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 29 Apr 2024 13:26:05 -0300 Subject: [PATCH 51/65] fix merge --- prover/prover_cli/src/commands/status/batch.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index bb2d1a802aae..456d2b6e29d9 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -55,7 +55,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( .get_leaf_witness_generator_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) - .unwrap_or(TaskStatus::Custom( - "Leaf witness generator job not found 🚫".to_owned(), - )) + .unwrap_or_default() } async fn get_proof_compression_job_status_for_batch<'a>( From e08b05e383f0d35512df22fda48cbe8699dbda46 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 29 Apr 2024 14:26:55 -0300 Subject: [PATCH 52/65] add querys --- core/lib/basic_types/src/prover_dal.rs | 33 +++++- .../prover_cli/src/commands/status/batch.rs | 41 +++++++ .../prover_cli/src/commands/status/utils.rs | 14 +-- ...dd8547a1ad20492ec37c3c0be5639e5d49952.json | 82 ++++++++++++++ ...9bfb838c787fc58d7536f9e9976e5e515431a.json | 106 ++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 72 +++++++++++- 6 files changed, 339 insertions(+), 9 deletions(-) create mode 100644 prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json create mode 100644 prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 7daea8a2205b..0e5963801082 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, CircuitIdRoundTuple, Eip4844Blobs}, + basic_fri_types::{AggregationRound, Eip4844Blobs}, protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -290,6 +290,37 @@ pub struct LeafWitnessGeneratorJobInfo { pub picked_by: Option, } +pub struct NodeWitnessGeneratorJobInfo { + pub l1_batch_number: 
L1BatchNumber, + pub circuit_id: u32, + pub depth: u32, + pub status: WitnessJobStatus, + pub attempts: u32, + pub aggregations_url: Option, + pub processing_started_at: Option, + pub time_taken: Option, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub number_of_dependent_jobs: Option, + pub protocol_version: Option, + pub picked_by: Option, +} + +pub struct SchedulerWitnessGeneratorJobInfo { + pub l1_batch_number: L1BatchNumber, + pub scheduler_partial_input_blob_url: String, + pub status: WitnessJobStatus, + pub processing_started_at: Option, + pub time_taken: Option, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub attempts: u32, + pub protocol_version: Option, + pub picked_by: Option, +} + #[derive(Debug, EnumString, Display)] pub enum ProofCompressionJobStatus { #[strum(serialize = "queued")] diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 456d2b6e29d9..bd1b768ba27f 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -62,6 +62,25 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( .unwrap_or_default() } +async fn get_proof_node_witness_generator_status_for_batch<'a>( + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_witness_generator_dal() + .get_node_witness_generator_job_for_batch(batch_number) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or_default() +} + +async fn get_proof_scheduler_witness_generator_status_for_batch<'a>( + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_witness_generator_dal() + .get_scheduler_witness_generator_job_for_batch(batch_number) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or_default() +} + async fn get_proof_compression_job_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index e2408d9c17a9..6a0ebc1b7fb9 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -27,7 +27,7 @@ pub struct BatchData { /// The recursion tip data. pub recursion_tip: Task, /// The scheduler data. - pub scheduler: Task, + pub scheduler_witness_generator: Task, /// The compressor data. pub compressor: Task, } @@ -45,7 +45,7 @@ impl Debug for BatchData { writeln!(f, "{:?}", self.leaf_witness_generator)?; writeln!(f, "{:?}", self.node_witness_generator)?; writeln!(f, "{:?}", self.recursion_tip)?; - writeln!(f, "{:?}", self.scheduler)?; + writeln!(f, "{:?}", self.scheduler_witness_generator)?; writeln!(f, "{:?}", self.compressor) } } @@ -82,7 +82,7 @@ impl Default for BatchData { prover_jobs_status: TaskStatus::default(), }, }, - scheduler: Task::Scheduler { + scheduler_witness_generator: Task::SchedulerWitnessGenerator { status: TaskStatus::default(), aggregation_round_info: AggregationRoundInfo { round: AggregationRound::Scheduler, @@ -185,7 +185,7 @@ pub enum Task { }, /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] - Scheduler { + SchedulerWitnessGenerator { status: TaskStatus, aggregation_round_info: AggregationRoundInfo, }, @@ -201,7 +201,7 @@ impl Task { | Task::LeafWitnessGenerator { status, .. } | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. 
} - | Task::Scheduler { status, .. } + | Task::SchedulerWitnessGenerator { status, .. } | Task::Compressor(status) => status.clone(), } } @@ -224,7 +224,7 @@ impl Task { aggregation_round_info, .. } - | Task::Scheduler { + | Task::SchedulerWitnessGenerator { aggregation_round_info, .. } => Some(aggregation_round_info.round), @@ -254,7 +254,7 @@ impl Task { status, aggregation_round_info, } - | Task::Scheduler { + | Task::SchedulerWitnessGenerator { status, aggregation_round_info, } => match status { diff --git a/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json b/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json new file mode 100644 index 000000000000..415b3e31c798 --- /dev/null +++ b/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n scheduler_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "scheduler_partial_input_blob_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 4, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 5, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 9, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 10, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952" +} diff --git a/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json b/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json new file mode 100644 index 000000000000..896f10a4ca3a --- /dev/null +++ b/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json @@ -0,0 +1,106 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n node_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "depth", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 6, + "name": "aggregations_url", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 9, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 11, + "name": 
"updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 12, + "name": "number_of_dependent_jobs", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 14, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 30d9bb21c977..62464a49d7af 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -7,7 +7,8 @@ use zksync_basic_types::{ protocol_version::ProtocolVersionId, prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, - LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, + LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, + SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1223,4 +1224,73 @@ impl FriWitnessGeneratorDal<'_, '_> { number_of_basic_circuits: row.number_of_basic_circuits, }) } + + pub async fn get_node_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + node_aggregation_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| NodeWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + circuit_id: row.circuit_id as u32, + depth: row.depth as u32, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + attempts: row.attempts as u32, + aggregations_url: row.aggregations_url, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + number_of_dependent_jobs: row.number_of_dependent_jobs, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + }) + } + + pub async fn get_scheduler_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + scheduler_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| SchedulerWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + attempts: row.attempts as u32, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + }) + } } From cf069b9b2def6e47e019b3dc7beeeae08e7df26c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 15:32:54 -0300 Subject: [PATCH 53/65] fix witnes jobs status --- Cargo.lock | 1 + core/lib/basic_types/Cargo.toml | 1 + core/lib/basic_types/src/prover_dal.rs | 31 +++++++++++-- prover/Cargo.lock | 1 + .../prover_cli/src/commands/status/batch.rs | 45 ++++++++++++------- 
.../prover_cli/src/commands/status/utils.rs | 25 +++++++++++ .../src/fri_witness_generator_dal.rs | 44 ++++++++++-------- 7 files changed, 110 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1d3481d0009..327543d495ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8124,6 +8124,7 @@ dependencies = [ "num_enum 0.7.2", "serde", "serde_json", + "sqlx", "strum", "web3", ] diff --git a/core/lib/basic_types/Cargo.toml b/core/lib/basic_types/Cargo.toml index 13ce9cc357d7..ef6733b0ef19 100644 --- a/core/lib/basic_types/Cargo.toml +++ b/core/lib/basic_types/Cargo.toml @@ -20,3 +20,4 @@ serde_json.workspace = true chrono.workspace = true strum = { workspace = true, features = ["derive"] } num_enum.workspace = true +sqlx = { workspace = true, feature= ["derive"]} diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 0e5963801082..d81f45e31231 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -124,7 +124,7 @@ pub struct ProverJobStatusInProgress { pub started_at: DateTime, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct WitnessJobStatusSuccessful { pub started_at: DateTime, pub time_taken: Duration, @@ -139,7 +139,7 @@ impl Default for WitnessJobStatusSuccessful { } } -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct WitnessJobStatusFailed { pub started_at: DateTime, pub error: String, @@ -161,7 +161,7 @@ pub enum ProverJobStatus { Ignored, } -#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr)] +#[derive(Debug, Clone, strum::Display, strum::EnumString, strum::AsRefStr)] pub enum WitnessJobStatus { #[strum(serialize = "failed")] Failed(WitnessJobStatusFailed), @@ -273,7 +273,31 @@ pub struct BasicWitnessGeneratorJobInfo { pub eip_4844_blobs: Option, } +// impl FromRow<'_, R> for BasicWitnessGeneratorJobInfo { +// fn from_row(row: &R) -> sqlx::Result { +// let l1_batch_number: i32 = row.try_get("l1_batch_number")?; +// let attempts: i32 = row.try_get("attempts")?; +// Ok(Self { +// l1_batch_number: L1BatchNumber(l1_batch_number as u32), +// merkle_tree_paths_blob_url: row.try_get("merkle_tree_paths_blob_url")?, +// attempts: attempts as u32, +// status: WitnessJobStatus::from_str(row.try_get("status")?).unwrap(), +// error: row.try_get("error")?, +// created_at: row.try_get("created_at")?, +// updated_at: row.try_get("updated_at")?, +// processing_started_at: row.try_get("processing_started_at")?, +// time_taken: row.try_get("time_taken")?, +// is_blob_cleaned: row.try_get("is_blob_cleaned")?, +// protocol_version: row.try_get("protocol_version")?, +// picked_by: row.try_get("picked_by")?, +// eip_4844_blobs: +// Some(Eip4844Blobs::from(row.try_get::, &str>("eip_4844_blobs")?)) +// }) +// } +// } + pub struct LeafWitnessGeneratorJobInfo { + pub id: u32, pub l1_batch_number: L1BatchNumber, pub circuit_id: u32, pub closed_form_inputs_blob_url: Option, @@ -291,6 +315,7 @@ pub struct LeafWitnessGeneratorJobInfo { } pub struct NodeWitnessGeneratorJobInfo { + pub id: u32, pub l1_batch_number: L1BatchNumber, pub circuit_id: u32, pub depth: u32, diff --git a/prover/Cargo.lock b/prover/Cargo.lock index e690f7ddf1fe..3edd696f5013 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -7516,6 +7516,7 @@ dependencies = [ "num_enum 0.7.2", "serde", "serde_json", + "sqlx", "strum", "web3", ] diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index bd1b768ba27f..0b11151e53e5 100644 --- 
a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,7 +1,9 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; -use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; +use zksync_types::{ + basic_fri_types::AggregationRound, prover_dal::WitnessJobStatus, L1BatchNumber, +}; use super::utils::{AggregationRoundInfo, BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; @@ -62,21 +64,21 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - conn.fri_witness_generator_dal() - .get_leaf_witness_generator_job_for_batch(batch_number) + let status_vec: Vec = conn + .fri_witness_generator_dal() + .get_leaf_witness_generator_jobs_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() + .iter() + .map(|s| s.status.clone()) + .collect(); + TaskStatus::from(status_vec) } async fn get_proof_node_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - conn.fri_witness_generator_dal() - .get_node_witness_generator_job_for_batch(batch_number) + let status_vec: Vec = conn + .fri_witness_generator_dal() + .get_node_witness_generator_jobs_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() + .iter() + .map(|s| s.status.clone()) + .collect(); + TaskStatus::from(status_vec) } async fn get_proof_scheduler_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - conn.fri_witness_generator_dal() - .get_scheduler_witness_generator_job_for_batch(batch_number) + let status_vec: Vec = conn + .fri_witness_generator_dal() + .get_scheduler_witness_generator_jobs_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() + .iter() + .map(|s| s.status.clone()) + .collect(); + TaskStatus::from(status_vec) } async fn get_proof_compression_job_status_for_batch<'a>( diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 6a0ebc1b7fb9..73daffbba61a 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -157,6 +157,31 @@ impl From for TaskStatus { } } +impl From> for TaskStatus { + fn from(status_vector: Vec) -> Self { + if status_vector.is_empty() { + TaskStatus::JobsNotFound + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::Queued)) + { + TaskStatus::Queued + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::WaitingForProofs)) + { + TaskStatus::WaitingForProofs + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::InProgress)) + { + TaskStatus::Successful + } else { + TaskStatus::InProgress + } + } +} + #[derive(EnumString, Clone, Display)] pub enum Task { /// Represents the basic witness generator task and its status. 
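For reference, a minimal sketch of how the aggregate witness-status mapping added above behaves (illustrative only, not part of the committed diffs; it assumes the `TaskStatus` and `WitnessJobStatus` definitions exactly as shown in the surrounding hunks and ignores module paths and visibility):

    // Hypothetical sanity check for the Vec<WitnessJobStatus> -> TaskStatus conversion
    // introduced in prover_cli/src/commands/status/utils.rs; types are assumed as in the
    // diffs above, with TaskStatus in scope from the same module.
    use zksync_types::prover_dal::{WitnessJobStatus, WitnessJobStatusSuccessful};

    fn witness_status_mapping_examples() {
        // No rows for the batch in the witness jobs table -> TaskStatus::JobsNotFound.
        let status: TaskStatus = Vec::<WitnessJobStatus>::new().into();
        assert!(matches!(status, TaskStatus::JobsNotFound));

        // Every job still queued -> the whole round reports TaskStatus::Queued.
        let status: TaskStatus =
            vec![WitnessJobStatus::Queued, WitnessJobStatus::Queued].into();
        assert!(matches!(status, TaskStatus::Queued));

        // A mix of finished and pending jobs falls through to TaskStatus::InProgress.
        let status: TaskStatus = vec![
            WitnessJobStatus::Queued,
            WitnessJobStatus::Successful(WitnessJobStatusSuccessful::default()),
        ]
        .into();
        assert!(matches!(status, TaskStatus::InProgress));
    }
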
diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 62464a49d7af..57f14c0742dc 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1189,10 +1189,10 @@ impl FriWitnessGeneratorDal<'_, '_> { }) } - pub async fn get_leaf_witness_generator_job_for_batch( + pub async fn get_leaf_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Option { + ) -> Vec { sqlx::query!( r#" SELECT @@ -1204,31 +1204,34 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_optional(self.storage.conn()) + .fetch_all(self.storage.conn()) .await .unwrap() + .iter() .map(|row| LeafWitnessGeneratorJobInfo { + id: row.id as u32, l1_batch_number: l1_batch_number, circuit_id: row.circuit_id as u32, - closed_form_inputs_blob_url: row.closed_form_inputs_blob_url, + closed_form_inputs_blob_url: row.closed_form_inputs_blob_url.clone(), attempts: row.attempts as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), - error: row.error, + error: row.error.clone(), created_at: row.created_at, updated_at: row.updated_at, processing_started_at: row.processing_started_at, time_taken: row.time_taken, is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, - picked_by: row.picked_by, + picked_by: row.picked_by.clone(), number_of_basic_circuits: row.number_of_basic_circuits, }) + .collect() } - pub async fn get_node_witness_generator_job_for_batch( + pub async fn get_node_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Option { + ) -> Vec { sqlx::query!( r#" SELECT @@ -1240,31 +1243,34 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_optional(self.storage.conn()) + .fetch_all(self.storage.conn()) .await .unwrap() + .iter() .map(|row| NodeWitnessGeneratorJobInfo { + id: row.id as u32, l1_batch_number: l1_batch_number, circuit_id: row.circuit_id as u32, depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), attempts: row.attempts as u32, - aggregations_url: row.aggregations_url, + aggregations_url: row.aggregations_url.clone(), processing_started_at: row.processing_started_at, time_taken: row.time_taken, - error: row.error, + error: row.error.clone(), created_at: row.created_at, updated_at: row.updated_at, number_of_dependent_jobs: row.number_of_dependent_jobs, protocol_version: row.protocol_version, - picked_by: row.picked_by, + picked_by: row.picked_by.clone(), }) + .collect() } - pub async fn get_scheduler_witness_generator_job_for_batch( + pub async fn get_scheduler_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Option { + ) -> Vec { sqlx::query!( r#" SELECT @@ -1276,21 +1282,23 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_optional(self.storage.conn()) + .fetch_all(self.storage.conn()) .await .unwrap() + .iter() .map(|row| SchedulerWitnessGeneratorJobInfo { l1_batch_number: l1_batch_number, - scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url, + scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url.clone(), status: WitnessJobStatus::from_str(&row.status).unwrap(), processing_started_at: row.processing_started_at, time_taken: row.time_taken, - error: row.error, + error: row.error.clone(), created_at: row.created_at, updated_at: row.updated_at, attempts: row.attempts as u32, protocol_version: 
             protocol_version: row.protocol_version,
-            picked_by: row.picked_by,
+            picked_by: row.picked_by.clone(),
         })
+        .collect()
     }
 }

From 6ad55660dd55ee75e1985c0ffa6aa05e3a22310c Mon Sep 17 00:00:00 2001
From: Joaquin Carletti
Date: Tue, 30 Apr 2024 15:35:15 -0300
Subject: [PATCH 54/65] rm comments

---
 core/lib/basic_types/src/prover_dal.rs | 23 -----------------------
 1 file changed, 23 deletions(-)

diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs
index d81f45e31231..dc106da18a7b 100644
--- a/core/lib/basic_types/src/prover_dal.rs
+++ b/core/lib/basic_types/src/prover_dal.rs
@@ -273,29 +273,6 @@ pub struct BasicWitnessGeneratorJobInfo {
     pub eip_4844_blobs: Option<Eip4844Blobs>,
 }
 
-// impl FromRow<'_, R> for BasicWitnessGeneratorJobInfo {
-//     fn from_row(row: &R) -> sqlx::Result<Self> {
-//         let l1_batch_number: i32 = row.try_get("l1_batch_number")?;
-//         let attempts: i32 = row.try_get("attempts")?;
-//         Ok(Self {
-//             l1_batch_number: L1BatchNumber(l1_batch_number as u32),
-//             merkle_tree_paths_blob_url: row.try_get("merkle_tree_paths_blob_url")?,
-//             attempts: attempts as u32,
-//             status: WitnessJobStatus::from_str(row.try_get("status")?).unwrap(),
-//             error: row.try_get("error")?,
-//             created_at: row.try_get("created_at")?,
-//             updated_at: row.try_get("updated_at")?,
-//             processing_started_at: row.try_get("processing_started_at")?,
-//             time_taken: row.try_get("time_taken")?,
-//             is_blob_cleaned: row.try_get("is_blob_cleaned")?,
-//             protocol_version: row.try_get("protocol_version")?,
-//             picked_by: row.try_get("picked_by")?,
-//             eip_4844_blobs:
-//                 Some(Eip4844Blobs::from(row.try_get::<Vec<u8>, &str>("eip_4844_blobs")?))
-//         })
-//     }
-// }
-
 pub struct LeafWitnessGeneratorJobInfo {
     pub id: u32,
     pub l1_batch_number: L1BatchNumber,

From a3f1938a9a0aef5829f81ad14ed7a8758684f701 Mon Sep 17 00:00:00 2001
From: ilitteri
Date: Tue, 30 Apr 2024 15:56:08 -0300
Subject: [PATCH 55/65] Fix & refactors

---
 .../prover_cli/src/commands/status/batch.rs   | 38 +++++++++----------
 1 file changed, 19 insertions(+), 19 deletions(-)

diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs
index 0b11151e53e5..661c2eef65c6 100644
--- a/prover/prover_cli/src/commands/status/batch.rs
+++ b/prover/prover_cli/src/commands/status/batch.rs
@@ -40,7 +40,10 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result(
+async fn get_aggregation_round_info_for_batch<'a>(
     batch_number: L1BatchNumber,
     aggregation_round: AggregationRound,
     conn: &mut Connection<'a, Prover>,
@@ -106,7 +109,7 @@ async fn get_prover_jobs_data_for_batch<'a>(
         .into();
 
     AggregationRoundInfo {
-        round: AggregationRound::BasicCircuits,
+        round: aggregation_round,
         prover_jobs_status: status,
     }
 }
@@ -126,42 +129,39 @@ async fn get_proof_leaf_witness_generator_status_for_batch<'a>(
     batch_number: L1BatchNumber,
     conn: &mut Connection<'a, Prover>,
 ) -> TaskStatus {
-    let status_vec: Vec<WitnessJobStatus> = conn
-        .fri_witness_generator_dal()
+    conn.fri_witness_generator_dal()
         .get_leaf_witness_generator_jobs_for_batch(batch_number)
         .await
         .iter()
         .map(|s| s.status.clone())
-        .collect();
-    TaskStatus::from(status_vec)
+        .collect::<Vec<WitnessJobStatus>>()
+        .into()
 }
 
 async fn get_proof_node_witness_generator_status_for_batch<'a>(
     batch_number: L1BatchNumber,
     conn: &mut Connection<'a, Prover>,
 ) -> TaskStatus {
-    let status_vec: Vec<WitnessJobStatus> = conn
-        .fri_witness_generator_dal()
+    conn.fri_witness_generator_dal()
         .get_node_witness_generator_jobs_for_batch(batch_number)
         .await
         .iter()
         .map(|s| s.status.clone())
-        .collect();
-    TaskStatus::from(status_vec)
+        .collect::<Vec<WitnessJobStatus>>()
+        .into()
 }
 
 async fn get_proof_scheduler_witness_generator_status_for_batch<'a>(
     batch_number: L1BatchNumber,
     conn: &mut Connection<'a, Prover>,
 ) -> TaskStatus {
-    let status_vec: Vec<WitnessJobStatus> = conn
-        .fri_witness_generator_dal()
+    conn.fri_witness_generator_dal()
         .get_scheduler_witness_generator_jobs_for_batch(batch_number)
         .await
         .iter()
         .map(|s| s.status.clone())
-        .collect();
-    TaskStatus::from(status_vec)
+        .collect::<Vec<WitnessJobStatus>>()
+        .into()
 }
 
 async fn get_proof_compression_job_status_for_batch<'a>(

From 78c7cd8c7b220a76f68ac56c56a1019a01a9821e Mon Sep 17 00:00:00 2001
From: ilitteri
Date: Tue, 30 Apr 2024 16:07:00 -0300
Subject: [PATCH 56/65] zk lint rust

---
 prover/prover_dal/src/fri_prover_dal.rs            | 4 ++--
 prover/prover_dal/src/fri_witness_generator_dal.rs | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs
index a122665853f7..62890236b993 100644
--- a/prover/prover_dal/src/fri_prover_dal.rs
+++ b/prover/prover_dal/src/fri_prover_dal.rs
@@ -622,10 +622,10 @@ impl FriProverDal<'_, '_> {
             .iter()
             .map(|row| ProverJobFriInfo {
                 id: row.id as u32,
-                l1_batch_number: l1_batch_number,
+                l1_batch_number,
                 circuit_id: row.circuit_id as u32,
                 circuit_blob_url: row.circuit_blob_url.clone(),
-                aggregation_round: aggregation_round,
+                aggregation_round,
                 sequence_number: row.sequence_number as u32,
                 status: ProverJobStatus::from_str(&row.status).unwrap(),
                 error: row.error.clone(),
diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs
index 2662183d22bf..2740ca609d1c 100644
--- a/prover/prover_dal/src/fri_witness_generator_dal.rs
+++ b/prover/prover_dal/src/fri_witness_generator_dal.rs
@@ -1172,7 +1172,7 @@ impl FriWitnessGeneratorDal<'_, '_> {
         .await
         .unwrap()
         .map(|row| BasicWitnessGeneratorJobInfo {
-            l1_batch_number: l1_batch_number,
+            l1_batch_number,
             merkle_tree_paths_blob_url: row.merkle_tree_paths_blob_url,
             attempts: row.attempts as u32,
             status: WitnessJobStatus::from_str(&row.status).unwrap(),
             error: row.error,
             created_at: row.created_at,
             updated_at: row.updated_at,
             processing_started_at: row.processing_started_at,
             time_taken: row.time_taken,
             is_blob_cleaned: row.is_blob_cleaned,
             protocol_version: row.protocol_version,
             picked_by: row.picked_by,
-            eip_4844_blobs: row.eip_4844_blobs.map(|vec_u8| Eip4844Blobs::from(vec_u8)),
+            eip_4844_blobs: row.eip_4844_blobs.map(Eip4844Blobs::from),
         })
     }
 }

From 33729b8b2baf139af81cf0b9609ee771fd3bc7d2 Mon Sep 17 00:00:00 2001
From: ilitteri
Date: Tue, 30 Apr 2024 16:09:31 -0300
Subject: [PATCH 57/65] Fix import

---
 prover/prover_fri_gateway/src/proof_submitter.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/prover/prover_fri_gateway/src/proof_submitter.rs b/prover/prover_fri_gateway/src/proof_submitter.rs
index 1c5850d31a0b..025d79e2f8c3 100644
--- a/prover/prover_fri_gateway/src/proof_submitter.rs
+++ b/prover/prover_fri_gateway/src/proof_submitter.rs
@@ -1,7 +1,7 @@
 use async_trait::async_trait;
-use prover_dal::{fri_proof_compressor_dal::ProofCompressionJobStatus, ProverDal};
+use prover_dal::ProverDal;
 use zksync_prover_interface::api::{SubmitProofRequest, SubmitProofResponse};
-use zksync_types::L1BatchNumber;
+use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber};
 
 use crate::api_data_fetcher::{PeriodicApi, PeriodicApiStruct};

From 112ae934e763215775f33a23a61b15657da9c2bc Mon Sep 17 00:00:00 2001
From: Joaquin Carletti
Date: Tue, 30 Apr 2024 16:12:15 -0300
Subject:
[PATCH 58/65] fix import --- prover/prover_fri_gateway/src/proof_submitter.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_fri_gateway/src/proof_submitter.rs b/prover/prover_fri_gateway/src/proof_submitter.rs index 1c5850d31a0b..025d79e2f8c3 100644 --- a/prover/prover_fri_gateway/src/proof_submitter.rs +++ b/prover/prover_fri_gateway/src/proof_submitter.rs @@ -1,7 +1,7 @@ use async_trait::async_trait; -use prover_dal::{fri_proof_compressor_dal::ProofCompressionJobStatus, ProverDal}; +use prover_dal::ProverDal; use zksync_prover_interface::api::{SubmitProofRequest, SubmitProofResponse}; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber}; use crate::api_data_fetcher::{PeriodicApi, PeriodicApiStruct}; From e1854fa6e7ce89a3e9667be0afdc8afbf5b7fca4 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Tue, 30 Apr 2024 16:12:49 -0300 Subject: [PATCH 59/65] zk lint rust --- prover/prover_dal/src/fri_witness_generator_dal.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index a65e88b7c325..c336084e7baa 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1210,7 +1210,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .iter() .map(|row| LeafWitnessGeneratorJobInfo { id: row.id as u32, - l1_batch_number: l1_batch_number, + l1_batch_number, circuit_id: row.circuit_id as u32, closed_form_inputs_blob_url: row.closed_form_inputs_blob_url.clone(), attempts: row.attempts as u32, @@ -1249,7 +1249,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .iter() .map(|row| NodeWitnessGeneratorJobInfo { id: row.id as u32, - l1_batch_number: l1_batch_number, + l1_batch_number, circuit_id: row.circuit_id as u32, depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), @@ -1287,7 +1287,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .unwrap() .iter() .map(|row| SchedulerWitnessGeneratorJobInfo { - l1_batch_number: l1_batch_number, + l1_batch_number, scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url.clone(), status: WitnessJobStatus::from_str(&row.status).unwrap(), processing_started_at: row.processing_started_at, From fa26c26aee21eca4fae8e13fedbb2b5fe268cefb Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 17:26:33 -0300 Subject: [PATCH 60/65] fix wg task types --- prover/prover_cli/src/commands/status/batch.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 661c2eef65c6..c29d25dc62e5 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -67,7 +67,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result Date: Tue, 30 Apr 2024 18:02:04 -0300 Subject: [PATCH 61/65] update README --- prover/prover_cli/README.md | 61 ++++++++++++++++++++++--------------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 99869b36bf25..f773bf60dcd8 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -1,6 +1,4 @@ -# CLI to better understand and debug provers - -## Usage +# Usage > Note: For now, its necessary to use the 'zk f' tool to set up the environment. 
The main command will later be changed > to `pli`. @@ -10,7 +8,7 @@ Usage: zk f cargo run --release -- Commands: file-info - status-jobs + status help Print this message or the help of the given subcommand(s) Options: @@ -18,35 +16,48 @@ Options: -V, --version Print version ``` -### Status-jobs +## Status -You can get the progress for some batch proof, for a bunch of batches the `status-jobs` command: +### Status batch -```bash -# Displays the proof progress of the batch 1 - zk f cargo run -- status-jobs --batch 1 -# Displays the proof progress of the batches 1 and 2 - zk f cargo run -- status-jobs --batch 1 2 -# Displays the proof progress of the batch 3, with additional information - zk f cargo run -- status-jobs --batch 3 --verbose -``` +Displays the proof status for a given batch or a set of batches. Example: ```bash -$ zk f cargo run -- status-jobs --batch 1 --verbose - -Batch number: 1 -Progress: 34.88% (45/129) -In progress: 1 -Queued: 83 -Successful: 45 -Failed: 0 +$ zk f run --release -- status batch -n 1 + +== Batch 1 Status == +> In Progress ⌛️ + +== Proving Stages == +-- Aggregaton Round 0 -- +Basic Witness Generator: Done ✅ +> Prover Jobs: In progress ⌛️ + +-- Aggregaton Round 1 -- +Leaf Witness Generator: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Aggregaton Round 2 -- +Node Witness Generator: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Aggregaton Round 3 -- +Recursion Tip: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Aggregaton Round 4 -- +Scheduler: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Compressor -- +> Compressor job not found 🚫 ``` -### File-Info +## File-Info -Displays the information about a given file: +Displays de information about a given file: ```bash cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin @@ -80,4 +91,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` +``` \ No newline at end of file From 10fdcb9345794294b39d2bad07f640b1822e6080 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 18:44:51 -0300 Subject: [PATCH 62/65] fix EIP Blob use --- prover/prover_cli/README.md | 2 +- prover/prover_dal/src/fri_prover_dal.rs | 3 +-- prover/prover_dal/src/fri_witness_generator_dal.rs | 7 ++++++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index f773bf60dcd8..6296f63b0b18 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -91,4 +91,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` \ No newline at end of file +``` diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 3c23f7fc6ac1..df0525cd9774 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -5,8 +5,7 @@ use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, prover_dal::{ - FriProverJobMetadata, JobCountStatistics, 
ProverJobFriInfo, ProverJobStatus, StuckJobs, - EIP_4844_CIRCUIT_ID, + z, FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, }, L1BatchNumber, }; diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index e74e9541bed8..947adf535db1 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1429,7 +1429,12 @@ impl FriWitnessGeneratorDal<'_, '_> { is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, picked_by: row.picked_by, - eip_4844_blobs: row.eip_4844_blobs.map(Eip4844Blobs::from), + eip_4844_blobs: row + .eip_4844_blobs + .as_deref() + .map(Eip4844Blobs::decode) + .transpose() + .unwrap(), }) } From 8a1101f815abd4c0858c2b79e0a2957120d3215c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 19:25:25 -0300 Subject: [PATCH 63/65] fix typos --- prover/prover_cli/README.md | 2 +- prover/prover_dal/src/fri_prover_dal.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 6296f63b0b18..ceef016a1d2a 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -57,7 +57,7 @@ Scheduler: In progress ⌛️ ## File-Info -Displays de information about a given file: +Displays the information about a given file: ```bash cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index df0525cd9774..942d1c06612d 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -5,7 +5,7 @@ use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, prover_dal::{ - z, FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, + FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, }, L1BatchNumber, }; From 1bbde9345b797779a132b5507f253e179eeb66aa Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 2 May 2024 09:33:42 -0300 Subject: [PATCH 64/65] Fix typo --- prover/prover_cli/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index ceef016a1d2a..48ca52bdde13 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -31,23 +31,23 @@ $ zk f run --release -- status batch -n 1 > In Progress ⌛️ == Proving Stages == --- Aggregaton Round 0 -- +-- Aggregation Round 0 -- Basic Witness Generator: Done ✅ > Prover Jobs: In progress ⌛️ --- Aggregaton Round 1 -- +-- Aggregation Round 1 -- Leaf Witness Generator: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ --- Aggregaton Round 2 -- +-- Aggregation Round 2 -- Node Witness Generator: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ --- Aggregaton Round 3 -- +-- Aggregation Round 3 -- Recursion Tip: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ --- Aggregaton Round 4 -- +-- Aggregation Round 4 -- Scheduler: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ From 6f45f43b381a4310bb47cbd2cc86783990960d3b Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 2 May 2024 18:03:19 -0300 Subject: [PATCH 65/65] update cargo.toml --- Cargo.lock | 1 - core/lib/basic_types/Cargo.toml | 1 - prover/Cargo.lock | 1 - 3 files changed, 3 deletions(-) diff --git 
a/Cargo.lock b/Cargo.lock index f41df0e2f4aa..6e6786a5930b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8120,7 +8120,6 @@ dependencies = [ "num_enum 0.7.2", "serde", "serde_json", - "sqlx", "strum", "web3", ] diff --git a/core/lib/basic_types/Cargo.toml b/core/lib/basic_types/Cargo.toml index dd5b3c743144..ad3b4c9e5cd8 100644 --- a/core/lib/basic_types/Cargo.toml +++ b/core/lib/basic_types/Cargo.toml @@ -16,7 +16,6 @@ serde_json.workspace = true chrono.workspace = true strum = { workspace = true, features = ["derive"] } num_enum.workspace = true -sqlx = { workspace = true, feature= ["derive"]} anyhow.workspace = true [dev-dependencies] diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 23ed1e37d2e9..77bea744d2e5 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -7515,7 +7515,6 @@ dependencies = [ "num_enum 0.7.2", "serde", "serde_json", - "sqlx", "strum", "web3", ]
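A closing note on the `fix EIP Blob use` change in patch 62 above: the `as_deref().map(Eip4844Blobs::decode).transpose()` chain is the usual way to turn an optional, fallible decode into a single fallible step. Below is a small, self-contained sketch of that pattern; the `decode` function is a hypothetical stand-in, since the real `Eip4844Blobs::decode` signature and error type are not shown in this series:

```rust
// Sketch of the Option<T> -> Result<Option<U>, E> "transpose" pattern used when
// decoding an optional, encoded column. `decode` is a hypothetical stand-in for
// `Eip4844Blobs::decode`; the real type and error are zksync-specific.
fn decode(bytes: &[u8]) -> Result<Vec<u16>, String> {
    if bytes.len() % 2 != 0 {
        return Err("blob length must be even".to_string());
    }
    Ok(bytes
        .chunks(2)
        .map(|pair| u16::from_le_bytes([pair[0], pair[1]]))
        .collect())
}

fn main() -> Result<(), String> {
    // A nullable DB column typically surfaces as Option<Vec<u8>>.
    let column: Option<Vec<u8>> = Some(vec![1, 0, 2, 0]);

    // map() yields Option<Result<_, _>>; transpose() flips it to Result<Option<_>, _>,
    // so a missing value stays Ok(None) and only a present-but-invalid value errors.
    let decoded: Option<Vec<u16>> = column.as_deref().map(decode).transpose()?;
    assert_eq!(decoded, Some(vec![1, 2]));

    // A NULL column decodes to None without touching the decoder at all.
    let missing: Option<Vec<u8>> = None;
    assert_eq!(missing.as_deref().map(decode).transpose()?, None);
    Ok(())
}
```

In the patch itself the same shape appears as `row.eip_4844_blobs.as_deref().map(Eip4844Blobs::decode).transpose().unwrap()`, where the trailing `unwrap` keeps the surrounding DAL method's existing panic-on-bad-data behavior.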