diff --git a/Cargo.toml b/Cargo.toml index 3b716e512..11f325628 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -58,11 +58,12 @@ serde_json = { version = "1", default-features = false, features = ["alloc"] } sha2 = "0.10.8" snafu = { version = "0.8.4", default-features = false } sqlparser = { version = "0.45.0", default-features = false } +sysinfo = { version = "0.33" } tiny-keccak = { version = "2.0.2", features = [ "keccak" ] } tempfile = "3.13.0" tracing = { version = "0.1.36", default-features = false } tracing-opentelemetry = { version = "0.22.0" } -tracing-subscriber = { version = "0.3.0" } +tracing-subscriber = { version = "0.3.0", features = ["env-filter"] } wasm-bindgen = { version = "0.2.92" } zerocopy = { version = "0.7.34" } diff --git a/crates/proof-of-sql/Cargo.toml b/crates/proof-of-sql/Cargo.toml index eeeccfa8f..0e5edc2ba 100644 --- a/crates/proof-of-sql/Cargo.toml +++ b/crates/proof-of-sql/Cargo.toml @@ -51,6 +51,7 @@ serde_json = { workspace = true } sha2 = { workspace = true } snafu = { workspace = true } sqlparser = { workspace = true, features = ["serde"] } +sysinfo = {workspace = true } tiny-keccak = { workspace = true } tracing = { workspace = true, features = ["attributes"] } zerocopy = { workspace = true } diff --git a/crates/proof-of-sql/benches/README.md b/crates/proof-of-sql/benches/README.md index 61440e1b6..62a6e8603 100644 --- a/crates/proof-of-sql/benches/README.md +++ b/crates/proof-of-sql/benches/README.md @@ -20,6 +20,15 @@ To run benchmarks with Jaeger, you need to do the following docker kill jaeger ``` +### Memory logging (optional) + +Jaeger benchmarks default to logging any traces at `DEBUG` level and above. Memory consumption is logged at `TRACE` level. In order to capture memory consumption in the Jaeger benchmarks, add `RUST_LOG=trace` to the command. 
+ +Example +``` +RUST_LOG=trace cargo bench -p proof-of-sql --bench jaeger_benches DynamicDory +``` + ## Criterion benchmarking To run benchmarks with Criterion, you need to do the following diff --git a/crates/proof-of-sql/benches/jaeger_benches.rs b/crates/proof-of-sql/benches/jaeger_benches.rs index 7bdd9aa3d..4ef307148 100644 --- a/crates/proof-of-sql/benches/jaeger_benches.rs +++ b/crates/proof-of-sql/benches/jaeger_benches.rs @@ -24,14 +24,21 @@ const SIZE: usize = 1_000_000; #[allow(clippy::items_after_statements)] fn main() { init_backend(); - use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + + use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter}; + let tracer = opentelemetry_jaeger::new_agent_pipeline() .with_service_name("benches") .install_simple() .unwrap(); + let opentelemetry = tracing_opentelemetry::layer().with_tracer(tracer); + + let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("DEBUG")); + tracing_subscriber::registry() .with(opentelemetry) + .with(filter) .try_init() .unwrap(); diff --git a/crates/proof-of-sql/src/base/polynomial/evaluation_vector.rs b/crates/proof-of-sql/src/base/polynomial/evaluation_vector.rs index 66af38721..6dc11235b 100644 --- a/crates/proof-of-sql/src/base/polynomial/evaluation_vector.rs +++ b/crates/proof-of-sql/src/base/polynomial/evaluation_vector.rs @@ -1,4 +1,4 @@ -use crate::base::if_rayon; +use crate::{base::if_rayon, utils::log}; use core::{ cmp, ops::{Mul, MulAssign, Sub, SubAssign}, @@ -43,6 +43,8 @@ pub fn compute_evaluation_vector(v: &mut [F], point: &[F]) where F: One + Sub + MulAssign + SubAssign + Mul + Send + Sync + Copy, { + log::log_memory_usage("Start"); + assert!(v.len() <= (1 << point.len())); if point.is_empty() || v.is_empty() { // v is guaranteed to be at most length 1 by the assert!. 
@@ -62,4 +64,6 @@ where }; compute_evaluation_vector_impl(left, right, *p); } + + log::log_memory_usage("End"); } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/blitzar_metadata_table.rs b/crates/proof-of-sql/src/proof_primitive/dory/blitzar_metadata_table.rs index a957f7546..a79973857 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/blitzar_metadata_table.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/blitzar_metadata_table.rs @@ -7,6 +7,7 @@ use crate::{ full_width_of_row, index_from_row_and_column, matrix_size, row_and_column_from_index, }, }, + utils::log, }; use alloc::{vec, vec::Vec}; use ark_ec::CurveGroup; @@ -61,7 +62,9 @@ pub fn signed_commits( return vec![]; } - if_rayon!( + log::log_memory_usage("Start"); + + let res = if_rayon!( all_sub_commits.par_chunks_exact(committable_columns.len() * 2), all_sub_commits.chunks_exact(committable_columns.len() * 2) ) @@ -80,7 +83,11 @@ pub fn signed_commits( }) .collect::>() }) - .collect() + .collect(); + + log::log_memory_usage("End"); + + res } /// Copies the column data to the scalar row slice. @@ -151,6 +158,8 @@ pub fn create_blitzar_metadata_tables( return (vec![], vec![], vec![]); } + log::log_memory_usage("Start"); + // Keep track of the lengths of the columns to handled signed data columns. let ones_columns_lengths = committable_columns .iter() @@ -218,7 +227,7 @@ pub fn create_blitzar_metadata_tables( let mut blitzar_scalars = vec![0u8; num_scalar_rows * num_scalar_columns]; // Populate the scalars array. 
- let span = span!(Level::INFO, "pack_blitzar_scalars").entered(); + let span = span!(Level::DEBUG, "pack_blitzar_scalars").entered(); if !blitzar_scalars.is_empty() { if_rayon!( blitzar_scalars.par_chunks_exact_mut(num_scalar_columns), @@ -269,6 +278,8 @@ pub fn create_blitzar_metadata_tables( } span.exit(); + log::log_memory_usage("End"); + ( blitzar_output_bit_table, blitzar_output_length_table, diff --git a/crates/proof-of-sql/src/proof_primitive/dory/deferred_msm.rs b/crates/proof-of-sql/src/proof_primitive/dory/deferred_msm.rs index c1cc2c9eb..6a253bb6d 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/deferred_msm.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/deferred_msm.rs @@ -1,3 +1,4 @@ +use crate::utils::log; use alloc::{vec, vec::Vec}; use ark_ec::VariableBaseMSM; use core::ops::{Add, AddAssign, Mul, MulAssign}; @@ -24,12 +25,18 @@ impl DeferredMSM { /// Collapse/compute the MSM into a single group element #[tracing::instrument(name = "DeferredMSM::compute", level = "debug", skip_all)] pub fn compute>(self) -> V { + log::log_memory_usage("Start"); + let (bases, scalars): (Vec<_>, Vec<_>) = self .pairs .into_iter() .map(|(gt, f)| (gt, f.unwrap_or(F::one()))) .unzip(); - V::msm_unchecked(&bases, &scalars) + let res = V::msm_unchecked(&bases, &scalars); + + log::log_memory_usage("End"); + + res } } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_evaluation_proof.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_evaluation_proof.rs index 3b29c1800..2d266b2a5 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_evaluation_proof.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_evaluation_proof.rs @@ -5,7 +5,10 @@ use super::{ extended_dory_reduce_helper::extended_dory_reduce_verify_fold_s_vecs, DeferredGT, DoryCommitment, DoryMessages, DoryProverPublicSetup, DoryScalar, DoryVerifierPublicSetup, F, }; -use crate::base::{commitment::CommitmentEvaluationProof, 
proof::Transcript}; +use crate::{ + base::{commitment::CommitmentEvaluationProof, proof::Transcript}, + utils::log, +}; use snafu::Snafu; /// The `CommitmentEvaluationProof` for the Dory PCS. @@ -40,6 +43,8 @@ impl CommitmentEvaluationProof for DoryEvaluationProof { generators_offset: u64, setup: &Self::ProverPublicSetup<'_>, ) -> Self { + log::log_memory_usage("Start"); + // Dory PCS Logic if generators_offset != 0 { // TODO: support offsets other than 0. @@ -59,6 +64,9 @@ impl CommitmentEvaluationProof for DoryEvaluationProof { let mut messages = DoryMessages::default(); let extended_state = eval_vmv_re_prove(&mut messages, transcript, state, prover_setup); extended_dory_inner_product_prove(&mut messages, transcript, extended_state, prover_setup); + + log::log_memory_usage("End"); + messages } @@ -78,6 +86,8 @@ impl CommitmentEvaluationProof for DoryEvaluationProof { _table_length: usize, setup: &Self::VerifierPublicSetup<'_>, ) -> Result<(), Self::Error> { + log::log_memory_usage("Start"); + let a_commit = DeferredGT::new( commit_batch.iter().map(|c| c.0), batching_factors.iter().map(|f| f.0), @@ -115,6 +125,9 @@ impl CommitmentEvaluationProof for DoryEvaluationProof { ) { Err(DoryError::VerificationError)?; } + + log::log_memory_usage("End"); + Ok(()) } } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_cpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_cpu.rs index 611e84f1b..2bc2017dd 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_cpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_cpu.rs @@ -1,5 +1,5 @@ use super::{pairings, DoryCommitment, DoryProverPublicSetup, DoryScalar, G1Projective}; -use crate::base::commitment::CommittableColumn; +use crate::{base::commitment::CommittableColumn, utils::log}; use alloc::vec::Vec; use ark_ec::VariableBaseMSM; use core::iter::once; @@ -21,6 +21,8 @@ where &'a T: Into, T: Sync, { + 
log::log_memory_usage("Start"); + // Compute offsets for the matrix. let num_columns = 1 << setup.sigma(); let first_row_offset = offset % num_columns; @@ -46,11 +48,15 @@ where }); // Compute the commitment for the entire matrix. - DoryCommitment(pairings::multi_pairing( + let res = DoryCommitment(pairings::multi_pairing( once(first_row_commit).chain(remaining_row_commits), &setup.prover_setup().Gamma_2.last().unwrap() [rows_offset..(rows_offset + remaining_row_count + 1)], - )) + )); + + log::log_memory_usage("End"); + + res } fn compute_dory_commitment( diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_gpu.rs index 9730dcc15..aa01ba94f 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_commitment_helper_gpu.rs @@ -1,5 +1,8 @@ use super::{pack_scalars, pairings, DoryCommitment, DoryProverPublicSetup, G1Affine}; -use crate::base::{commitment::CommittableColumn, if_rayon, slice_ops::slice_cast}; +use crate::{ + base::{commitment::CommittableColumn, if_rayon, slice_ops::slice_cast}, + utils::log, +}; use blitzar::compute::ElementP2; #[cfg(feature = "rayon")] use rayon::prelude::*; @@ -20,6 +23,8 @@ fn compute_dory_commitments_packed_impl( offset: usize, setup: &DoryProverPublicSetup, ) -> Vec { + log::log_memory_usage("Start"); + // Make sure that the committable columns are not empty. if committable_columns.is_empty() { return vec![]; @@ -84,7 +89,7 @@ fn compute_dory_commitments_packed_impl( .collect(); // Compute the Dory commitments using multi pairing of sub-commits. 
- let span = span!(Level::INFO, "multi_pairing").entered(); + let span = span!(Level::DEBUG, "multi_pairing").entered(); let dc: Vec = if_rayon!( cumulative_sub_commit_sums.par_iter(), cumulative_sub_commit_sums.iter() @@ -100,6 +105,8 @@ fn compute_dory_commitments_packed_impl( .collect(); span.exit(); + log::log_memory_usage("End"); + dc } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_reduce_helper.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_reduce_helper.rs index 4c2554cbb..9ab5767a6 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_reduce_helper.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_reduce_helper.rs @@ -2,7 +2,7 @@ use super::{ pairings::{multi_pairing_2, multi_pairing_4}, DeferredGT, ProverSetup, ProverState, VerifierSetup, VerifierState, F, GT, }; -use crate::base::if_rayon; +use crate::{base::if_rayon, utils::log}; #[cfg(feature = "rayon")] use rayon::{ iter::IndexedParallelIterator, @@ -24,6 +24,8 @@ pub fn dory_reduce_prove_compute_Ds( setup: &ProverSetup, half_n: usize, ) -> (GT, GT, GT, GT) { + log::log_memory_usage("Start"); + let (v_1L, v_1R) = state.v1.split_at(half_n); let (v_2L, v_2R) = state.v2.split_at(half_n); let (D_1L, D_1R, D_2L, D_2R) = multi_pairing_4( @@ -32,6 +34,9 @@ pub fn dory_reduce_prove_compute_Ds( (setup.Gamma_1[state.nu - 1], v_2L), (setup.Gamma_1[state.nu - 1], v_2R), ); + + log::log_memory_usage("End"); + (D_1L, D_1R, D_2L, D_2R) } /// From the Dory-Reduce algorithm in section 3.2 of https://eprint.iacr.org/2020/1274.pdf. 
@@ -45,12 +50,16 @@ pub fn dory_reduce_prove_mutate_v_vecs( setup: &ProverSetup, (beta, beta_inv): (F, F), ) { + log::log_memory_usage("Start"); + if_rayon!(state.v1.par_iter_mut(), state.v1.iter_mut()) .zip(setup.Gamma_1[state.nu]) .for_each(|(v, &g)| *v = (*v + g * beta).into()); if_rayon!(state.v2.par_iter_mut(), state.v2.iter_mut()) .zip(setup.Gamma_2[state.nu]) .for_each(|(v, &g)| *v = (*v + g * beta_inv).into()); + + log::log_memory_usage("End"); } /// From the Dory-Reduce algorithm in section 3.2 of https://eprint.iacr.org/2020/1274.pdf. /// @@ -59,9 +68,14 @@ pub fn dory_reduce_prove_mutate_v_vecs( /// * C_minus = #[tracing::instrument(level = "debug", skip_all)] pub fn dory_reduce_prove_compute_Cs(state: &ProverState, half_n: usize) -> (GT, GT) { + log::log_memory_usage("Start"); + let (v_1L, v_1R) = state.v1.split_at(half_n); let (v_2L, v_2R) = state.v2.split_at(half_n); let (C_plus, C_minus) = multi_pairing_2((v_1L, v_2R), (v_1R, v_2L)); + + log::log_memory_usage("End"); + (C_plus, C_minus) } @@ -76,6 +90,8 @@ pub fn dory_reduce_prove_fold_v_vecs( (alpha, alpha_inv): (F, F), half_n: usize, ) { + log::log_memory_usage("Start"); + let (v_1L, v_1R) = state.v1.split_at_mut(half_n); let (v_2L, v_2R) = state.v2.split_at_mut(half_n); if_rayon!(v_1L.par_iter_mut(), v_1L.iter_mut()) @@ -86,6 +102,8 @@ pub fn dory_reduce_prove_fold_v_vecs( .for_each(|(v_L, v_R)| *v_L = (*v_L * alpha_inv + v_R).into()); state.v1.truncate(half_n); state.v2.truncate(half_n); + + log::log_memory_usage("End"); } /// From the Dory-Reduce algorithm in section 3.2 of . 
/// diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_vmv_helper.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_vmv_helper.rs index 4041ddbc3..67f8a25d9 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_vmv_helper.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_vmv_helper.rs @@ -1,9 +1,9 @@ #[cfg(not(feature = "blitzar"))] use super::G1Projective; use super::{transpose, G1Affine, ProverSetup, F}; -use crate::base::polynomial::compute_evaluation_vector; #[cfg(feature = "blitzar")] use crate::base::slice_ops::slice_cast; +use crate::{base::polynomial::compute_evaluation_vector, utils::log}; use alloc::{vec, vec::Vec}; #[cfg(not(feature = "blitzar"))] use ark_ec::{AffineRepr, VariableBaseMSM}; @@ -44,6 +44,8 @@ pub(super) fn compute_T_vec_prime( nu: usize, prover_setup: &ProverSetup, ) -> Vec { + log::log_memory_usage("Start"); + let num_columns = 1 << sigma; let num_outputs = 1 << nu; let data_size = mem::size_of::(); @@ -60,7 +62,11 @@ pub(super) fn compute_T_vec_prime( a_transpose.as_slice(), ); - slice_cast(&blitzar_commits) + let res = slice_cast(&blitzar_commits); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(level = "debug", skip_all)] @@ -71,11 +77,18 @@ pub(super) fn compute_T_vec_prime( nu: usize, prover_setup: &ProverSetup, ) -> Vec { - a.chunks(1 << sigma) + log::log_memory_usage("Start"); + + let res = a + .chunks(1 << sigma) .map(|row| G1Projective::msm_unchecked(prover_setup.Gamma_1[nu], row).into()) .chain(core::iter::repeat(G1Affine::zero())) .take(1 << nu) - .collect() + .collect(); + + log::log_memory_usage("End"); + + res } /// Compute the size of the matrix M that is derived from `a`. 
diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_evaluation_proof.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_evaluation_proof.rs index 98c582119..d95841adf 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_evaluation_proof.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_evaluation_proof.rs @@ -5,7 +5,10 @@ use super::{ extended_dory_inner_product_verify, DeferredGT, DoryMessages, DoryScalar, DynamicDoryCommitment, ProverSetup, VerifierSetup, F, }; -use crate::base::{commitment::CommitmentEvaluationProof, proof::Transcript}; +use crate::{ + base::{commitment::CommitmentEvaluationProof, proof::Transcript}, + utils::log, +}; use serde::{Deserialize, Serialize}; use snafu::Snafu; @@ -42,6 +45,8 @@ impl CommitmentEvaluationProof for DynamicDoryEvaluationProof { generators_offset: u64, setup: &Self::ProverPublicSetup<'_>, ) -> Self { + log::log_memory_usage("Start"); + // Dory PCS Logic if generators_offset != 0 { // TODO: support offsets other than 0. 
@@ -60,6 +65,9 @@ impl CommitmentEvaluationProof for DynamicDoryEvaluationProof { let mut messages = DoryMessages::default(); let extended_state = eval_vmv_re_prove(&mut messages, transcript, state, setup); extended_dory_inner_product_prove(&mut messages, transcript, extended_state, setup); + + log::log_memory_usage("End"); + Self(messages) } @@ -79,6 +87,8 @@ impl CommitmentEvaluationProof for DynamicDoryEvaluationProof { _table_length: usize, setup: &Self::VerifierPublicSetup<'_>, ) -> Result<(), Self::Error> { + log::log_memory_usage("Start"); + let a_commit = DeferredGT::new( commit_batch.iter().map(|c| c.0), batching_factors.iter().map(|f| f.0), @@ -115,6 +125,9 @@ impl CommitmentEvaluationProof for DynamicDoryEvaluationProof { ) { Err(DoryError::VerificationError)?; } + + log::log_memory_usage("End"); + Ok(()) } } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs index 88c6e50c6..7a0447604 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -5,6 +5,7 @@ use super::{ use crate::{ base::{commitment::CommittableColumn, if_rayon, slice_ops::slice_cast}, proof_primitive::dynamic_matrix_utils::matrix_structure::row_and_column_from_index, + utils::log, }; use blitzar::compute::ElementP2; #[cfg(feature = "rayon")] @@ -36,6 +37,8 @@ pub(super) fn compute_dynamic_dory_commitments( offset: usize, setup: &ProverSetup, ) -> Vec { + log::log_memory_usage("Start"); + if committable_columns.is_empty() { return vec![]; } @@ -69,7 +72,7 @@ pub(super) fn compute_dynamic_dory_commitments( let num_commits = signed_sub_commits.len() / committable_columns.len(); // Calculate the dynamic Dory commitments. 
- let span = span!(Level::INFO, "multi_pairing").entered(); + let span = span!(Level::DEBUG, "multi_pairing").entered(); let ddc: Vec = signed_sub_commits .is_empty() .then_some(vec![ @@ -95,5 +98,7 @@ pub(super) fn compute_dynamic_dory_commitments( }); span.exit(); + log::log_memory_usage("End"); + ddc } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/eval_vmv_re.rs b/crates/proof-of-sql/src/proof_primitive/dory/eval_vmv_re.rs index 7273c48f1..6c691d5e9 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/eval_vmv_re.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/eval_vmv_re.rs @@ -2,7 +2,10 @@ use super::{ pairings, DeferredG2, DoryMessages, ExtendedProverState, ExtendedVerifierState, G1Projective, ProverSetup, VMVProverState, VMVVerifierState, VerifierSetup, }; -use crate::base::{if_rayon, proof::Transcript}; +use crate::{ + base::{if_rayon, proof::Transcript}, + utils::log, +}; use alloc::vec::Vec; use ark_ec::VariableBaseMSM; #[cfg(feature = "rayon")] @@ -23,6 +26,8 @@ pub fn eval_vmv_re_prove( state: VMVProverState, setup: &ProverSetup, ) -> ExtendedProverState { + log::log_memory_usage("Start"); + let C = pairings::pairing( G1Projective::msm_unchecked(&state.T_vec_prime, &state.v_vec), setup.Gamma_2_fin, @@ -39,7 +44,11 @@ pub fn eval_vmv_re_prove( let v2 = if_rayon!(state.v_vec.par_iter(), state.v_vec.iter()) .map(|c| (Gamma_2_fin * c).into()) .collect::>(); - ExtendedProverState::from_vmv_prover_state(state, v2) + let res = ExtendedProverState::from_vmv_prover_state(state, v2); + + log::log_memory_usage("End"); + + res } /// This is the verifier side of the Eval-VMV-RE algorithm in section 5 of . 
diff --git a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs index bd7e0e6ac..76bff637f 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs @@ -8,6 +8,7 @@ use crate::{ extended_dory_reduce_prove, extended_dory_reduce_verify, fold_scalars_0_prove, fold_scalars_0_verify, }, + utils::log, }; /// This is the prover side of the extended Dory-Innerproduct algorithm in section 4.3 of https://eprint.iacr.org/2020/1274.pdf. @@ -19,6 +20,8 @@ pub fn extended_dory_inner_product_prove( mut state: ExtendedProverState, setup: &ProverSetup, ) { + log::log_memory_usage("Start"); + let nu = state.base_state.nu; assert!(setup.max_nu >= nu); for _ in 0..nu { @@ -26,6 +29,8 @@ pub fn extended_dory_inner_product_prove( } let base_state = fold_scalars_0_prove(messages, transcript, state, setup); scalar_product_prove(messages, transcript, &base_state); + + log::log_memory_usage("End"); } /// This is the verifier side of the extended Dory-Innerproduct algorithm in section 4.3 of https://eprint.iacr.org/2020/1274.pdf. 
@@ -38,6 +43,8 @@ pub fn extended_dory_inner_product_verify( setup: &VerifierSetup, fold_s_tensors_verify: impl Fn(&ExtendedVerifierState) -> (F, F), ) -> bool { + log::log_memory_usage("Start"); + let nu = state.base_state.nu; assert!(setup.max_nu >= nu); for _ in 0..nu { @@ -47,5 +54,9 @@ pub fn extended_dory_inner_product_verify( } let base_state = fold_scalars_0_verify(messages, transcript, state, setup, fold_s_tensors_verify); - scalar_product_verify(messages, transcript, base_state, setup) + let res = scalar_product_verify(messages, transcript, base_state, setup); + + log::log_memory_usage("End"); + + res } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs index 49d6fa093..6653c2239 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs @@ -10,7 +10,7 @@ use super::{ extended_state::{ExtendedProverState, ExtendedVerifierState}, DoryMessages, ProverSetup, VerifierSetup, }; -use crate::base::proof::Transcript; +use crate::{base::proof::Transcript, utils::log}; /// This is the prover side of the extended Dory-Reduce algorithm in section 3.2 & 4.2 of https://eprint.iacr.org/2020/1274.pdf. 
#[tracing::instrument(level = "debug", skip_all)] @@ -20,6 +20,8 @@ pub fn extended_dory_reduce_prove( state: &mut ExtendedProverState, setup: &ProverSetup, ) { + log::log_memory_usage("Start"); + assert!(state.base_state.nu > 0); let half_n = 1usize << (state.base_state.nu - 1); let (D_1L, D_1R, D_2L, D_2R) = dory_reduce_prove_compute_Ds(&state.base_state, setup, half_n); @@ -45,6 +47,8 @@ pub fn extended_dory_reduce_prove( dory_reduce_prove_fold_v_vecs(&mut state.base_state, alphas, half_n); extended_dory_reduce_prove_fold_s_vecs(state, alphas, half_n); state.base_state.nu -= 1; + + log::log_memory_usage("End"); } /// This is the verifier side of the extended Dory-Reduce algorithm in section 3.2 & 4.2 of https://eprint.iacr.org/2020/1274.pdf. @@ -55,6 +59,8 @@ pub fn extended_dory_reduce_verify( state: &mut ExtendedVerifierState, setup: &VerifierSetup, ) -> bool { + log::log_memory_usage("Start"); + assert!(state.base_state.nu > 0); if messages.GT_messages.len() < 6 || messages.G1_messages.len() < 3 @@ -100,5 +106,8 @@ pub fn extended_dory_reduce_verify( state.alphas[state.base_state.nu - 1] = alphas.0; state.alpha_invs[state.base_state.nu - 1] = alphas.1; state.base_state.nu -= 1; + + log::log_memory_usage("End"); + true } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce_helper.rs b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce_helper.rs index ddeefa1eb..4c812a8ba 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce_helper.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce_helper.rs @@ -2,6 +2,7 @@ use super::{ extended_state::{ExtendedProverState, ExtendedVerifierState}, DeferredG1, DeferredG2, G1Affine, G1Projective, G2Affine, G2Projective, ProverSetup, F, }; +use crate::utils::log; use ark_ec::VariableBaseMSM; use ark_ff::Field; @@ -15,10 +16,15 @@ pub fn extended_dory_reduce_prove_compute_E_betas( state: &ExtendedProverState, setup: &ProverSetup, ) -> 
(G1Affine, G2Affine) { + log::log_memory_usage("Start"); + let E_1beta: G1Affine = G1Projective::msm_unchecked(setup.Gamma_1[state.base_state.nu], &state.s2).into(); let E_2beta: G2Affine = G2Projective::msm_unchecked(setup.Gamma_2[state.base_state.nu], &state.s1).into(); + + log::log_memory_usage("End"); + (E_1beta, E_2beta) } /// From the extended Dory-Reduce algorithm in section 4.2 of https://eprint.iacr.org/2020/1274.pdf. @@ -33,6 +39,8 @@ pub fn extended_dory_reduce_prove_compute_signed_Es( state: &ExtendedProverState, half_n: usize, ) -> (G1Affine, G1Affine, G2Affine, G2Affine) { + log::log_memory_usage("Start"); + let (v_1L, v_1R) = state.base_state.v1.split_at(half_n); let (v_2L, v_2R) = state.base_state.v2.split_at(half_n); let (s_1L, s_1R) = state.s1.split_at(half_n); @@ -41,6 +49,9 @@ pub fn extended_dory_reduce_prove_compute_signed_Es( let E_1minus = G1Projective::msm_unchecked(v_1R, s_2L).into(); let E_2plus = G2Projective::msm_unchecked(v_2R, s_1L).into(); let E_2minus = G2Projective::msm_unchecked(v_2L, s_1R).into(); + + log::log_memory_usage("End"); + (E_1plus, E_1minus, E_2plus, E_2minus) } /// From the extended Dory-Reduce algorithm in section 4.2 of https://eprint.iacr.org/2020/1274.pdf. @@ -54,6 +65,8 @@ pub fn extended_dory_reduce_prove_fold_s_vecs( (alpha, alpha_inv): (F, F), half_n: usize, ) { + log::log_memory_usage("Start"); + let (s_1L, s_1R) = state.s1.split_at_mut(half_n); let (s_2L, s_2R) = state.s2.split_at_mut(half_n); s_1L.iter_mut() @@ -64,6 +77,8 @@ pub fn extended_dory_reduce_prove_fold_s_vecs( .for_each(|(s_L, s_R)| *s_L = *s_L * alpha_inv + s_R); state.s1.truncate(half_n); state.s2.truncate(half_n); + + log::log_memory_usage("End"); } /// From the extended Dory-Reduce algorithm in section 4.2 of . 
/// diff --git a/crates/proof-of-sql/src/proof_primitive/dory/fold_scalars.rs b/crates/proof-of-sql/src/proof_primitive/dory/fold_scalars.rs index 0769ff6ea..8a4ca0ac0 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/fold_scalars.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/fold_scalars.rs @@ -3,7 +3,7 @@ use super::{ pairings, DeferredGT, DoryMessages, G1Projective, G2Projective, ProverSetup, ProverState, VerifierSetup, VerifierState, F, }; -use crate::base::proof::Transcript; +use crate::{base::proof::Transcript, utils::log}; /// This is the prover side of the Fold-Scalars algorithm in section 4.1 of . /// @@ -36,6 +36,8 @@ pub fn fold_scalars_0_verify( setup: &VerifierSetup, fold_s_tensors_verify: impl Fn(&ExtendedVerifierState) -> (F, F), ) -> VerifierState { + log::log_memory_usage("Start"); + assert_eq!(state.base_state.nu, 0); let (gamma, gamma_inv) = messages.verifier_F_message(transcript); let (s1_folded, s2_folded) = fold_s_tensors_verify(&state); @@ -50,5 +52,8 @@ pub fn fold_scalars_0_verify( )) * gamma_inv; state.base_state.D_1 += pairings::pairing(setup.H_1, setup.Gamma_2_0 * s1_folded * gamma); state.base_state.D_2 += pairings::pairing(setup.Gamma_1_0 * s2_folded * gamma_inv, setup.H_2); + + log::log_memory_usage("End"); + state.base_state } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs b/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs index 6f4e0d6ac..c39bfcbcc 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs @@ -1,6 +1,7 @@ use super::{blitzar_metadata_table::min_as_f, G1Affine, G1Projective}; use crate::{ base::commitment::CommittableColumn, proof_primitive::dory::offset_to_bytes::OffsetToBytes, + utils::log, }; use alloc::{vec, vec::Vec}; use ark_std::ops::Mul; @@ -65,6 +66,8 @@ pub fn modify_commits( offset: usize, num_matrix_commitment_columns: usize, ) -> Vec { + log::log_memory_usage("Start"); 
+ // Set parameters let num_offset_commits = OFFSET_SIZE + committable_columns.len(); let num_sub_commits_in_full_commit = bit_table.len() - num_offset_commits; @@ -122,7 +125,11 @@ pub fn modify_commits( } } - modifed_commits.into_iter().map(Into::into).collect() + let res = modifed_commits.into_iter().map(Into::into).collect(); + + log::log_memory_usage("End"); + + res } /// Packs bits of a committable column into the packed scalars array. @@ -314,6 +321,8 @@ pub fn bit_table_and_scalars_for_packed_msm( offset: usize, num_matrix_commitment_columns: usize, ) -> (Vec, Vec) { + log::log_memory_usage("Start"); + // Make sure that the committable columns are not empty. if committable_columns.is_empty() { return (vec![], vec![]); @@ -450,6 +459,8 @@ pub fn bit_table_and_scalars_for_packed_msm( } }); + log::log_memory_usage("End"); + (bit_table, packed_scalars) } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/pairings.rs b/crates/proof-of-sql/src/proof_primitive/dory/pairings.rs index 50c86a381..44f127688 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/pairings.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/pairings.rs @@ -1,4 +1,4 @@ -use crate::base::if_rayon; +use crate::{base::if_rayon, utils::log}; #[cfg(feature = "rayon")] use ark_ec::pairing::MillerLoopOutput; use ark_ec::pairing::{Pairing, PairingOutput}; @@ -18,6 +18,8 @@ pub fn multi_pairing( a: impl IntoIterator + Send> + Send, b: impl IntoIterator + Send> + Send, ) -> PairingOutput

{ + log::log_memory_usage("Start"); + multi_pairing_impl(a, b) } #[tracing::instrument(level = "debug", skip_all)] @@ -32,6 +34,8 @@ pub fn multi_pairing_2( impl IntoIterator + Send> + Send, ), ) -> (PairingOutput

, PairingOutput

) { + log::log_memory_usage("Start"); + multi_pairing_2_impl((a0, b0), (a1, b1)) } #[tracing::instrument(level = "debug", skip_all)] @@ -59,6 +63,8 @@ pub fn multi_pairing_4( PairingOutput

, PairingOutput

, ) { + log::log_memory_usage("Start"); + multi_pairing_4_impl((a0, b0), (a1, b1), (a2, b2), (a3, b3)) } /// # Panics diff --git a/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs b/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs index a49a2a7d3..23cea7409 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs @@ -1,6 +1,6 @@ #![allow(unused_variables)] use super::{pairings, DoryMessages, ProverState, VerifierSetup, VerifierState}; -use crate::base::proof::Transcript; +use crate::{base::proof::Transcript, utils::log}; /// This is the prover side of the Scalar-Product algorithm in section 3.1 of . #[allow(clippy::missing_panics_doc)] @@ -61,6 +61,8 @@ pub fn scalar_product_verify( // * `Gamma_1_0` is the Γ_1 used in Scalar-Product algorithm. // * `Gamma_2_0` is the Γ_2 used in Scalar-Product algorithm. + log::log_memory_usage("Start"); + assert_eq!(state.nu, 0); if messages.G1_messages.len() != 1 || messages.G2_messages.len() != 1 @@ -71,6 +73,10 @@ pub fn scalar_product_verify( let E_1 = messages.prover_recieve_G1_message(transcript); let E_2 = messages.prover_recieve_G2_message(transcript); let (d, d_inv) = messages.verifier_F_message(transcript); - pairings::pairing(E_1 + setup.Gamma_1_0 * d, E_2 + setup.Gamma_2_0 * d_inv) - == (state.C + setup.chi[0] + state.D_2 * d + state.D_1 * d_inv).compute() + let res = pairings::pairing(E_1 + setup.Gamma_1_0 * d, E_2 + setup.Gamma_2_0 * d_inv) + == (state.C + setup.chi[0] + state.D_2 * d + state.D_1 * d_inv).compute(); + + log::log_memory_usage("End"); + + res } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/setup.rs b/crates/proof-of-sql/src/proof_primitive/dory/setup.rs index 587df3add..4f276e8b0 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/setup.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/setup.rs @@ -1,5 +1,5 @@ use super::{G1Affine, G2Affine, PublicParameters, GT}; 
-use crate::base::impl_serde_for_ark_serde_unchecked; +use crate::{base::impl_serde_for_ark_serde_unchecked, utils::log}; use alloc::vec::Vec; use ark_ec::pairing::{Pairing, PairingOutput}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate}; @@ -124,6 +124,8 @@ impl<'a> ProverSetup<'a> { element_num_bytes: u32, scalars: &[u8], ) { + log::log_memory_usage("Start"); + self.blitzar_handle.msm(res, element_num_bytes, scalars); } @@ -135,6 +137,8 @@ impl<'a> ProverSetup<'a> { output_bit_table: &[u32], scalars: &[u8], ) { + log::log_memory_usage("Start"); + self.blitzar_handle .packed_msm(res, output_bit_table, scalars); } @@ -148,6 +152,8 @@ impl<'a> ProverSetup<'a> { output_lengths: &[u32], scalars: &[u8], ) { + log::log_memory_usage("Start"); + self.blitzar_handle .vlen_msm(res, output_bit_table, output_lengths, scalars); } diff --git a/crates/proof-of-sql/src/proof_primitive/dory/transpose.rs b/crates/proof-of-sql/src/proof_primitive/dory/transpose.rs index d3ad9775e..82d60ff64 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/transpose.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/transpose.rs @@ -1,4 +1,4 @@ -use crate::proof_primitive::dory::offset_to_bytes::OffsetToBytes; +use crate::{proof_primitive::dory::offset_to_bytes::OffsetToBytes, utils::log}; use alloc::{vec, vec::Vec}; #[tracing::instrument(name = "transpose_for_fixed_msm (gpu)", level = "debug", skip_all)] @@ -9,6 +9,8 @@ pub fn transpose_for_fixed_msm>( cols: usize, data_size: usize, ) -> Vec { + log::log_memory_usage("Start"); + let total_length_bytes = data_size * rows * cols; let mut transpose = vec![0_u8; total_length_bytes]; for n in offset..(column.len() + offset) { @@ -20,6 +22,9 @@ pub fn transpose_for_fixed_msm>( transpose[t_idx..t_idx + data_size] .copy_from_slice(column[p_idx].offset_to_bytes().as_slice()); } + + log::log_memory_usage("End"); + transpose } diff --git a/crates/proof-of-sql/src/proof_primitive/sumcheck/proof.rs 
b/crates/proof-of-sql/src/proof_primitive/sumcheck/proof.rs index 6ce2c7e12..f0a0f8b7e 100644 --- a/crates/proof-of-sql/src/proof_primitive/sumcheck/proof.rs +++ b/crates/proof-of-sql/src/proof_primitive/sumcheck/proof.rs @@ -5,6 +5,7 @@ use crate::{ scalar::Scalar, }, proof_primitive::sumcheck::{prove_round, ProverState}, + utils::log, }; /* * Adapted from arkworks @@ -31,6 +32,8 @@ impl SumcheckProof { evaluation_point: &mut [S], mut state: ProverState, ) -> Self { + log::log_memory_usage("Start"); + assert_eq!(evaluation_point.len(), state.num_vars); transcript.extend_as_be([state.max_multiplicands as u64, state.num_vars as u64]); // This challenge is in order to keep transcript messages grouped. (This simplifies the Solidity implementation.) @@ -47,6 +50,8 @@ impl SumcheckProof { r = Some(*scalar); } + log::log_memory_usage("End"); + SumcheckProof { coefficients } } @@ -61,6 +66,8 @@ impl SumcheckProof { num_variables: usize, claimed_sum: &S, ) -> Result, ProofError> { + log::log_memory_usage("Start"); + let coefficients_len = self.coefficients.len(); if coefficients_len % num_variables != 0 { return Err(ProofError::VerificationError { @@ -96,6 +103,9 @@ impl SumcheckProof { } expected_evaluation = round_evaluation; } + + log::log_memory_usage("End"); + Ok(Subclaim { evaluation_point, expected_evaluation, diff --git a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs index 60c29d1aa..151657eac 100644 --- a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs +++ b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs @@ -6,6 +6,7 @@ use crate::{ base::{if_rayon, scalar::Scalar}, proof_primitive::sumcheck::ProverState, + utils::log, }; use alloc::{vec, vec::Vec}; #[cfg(feature = "rayon")] @@ -13,6 +14,8 @@ use rayon::prelude::*; #[tracing::instrument(level = "debug", skip_all)] pub fn prove_round(prover_state: &mut ProverState, r_maybe: &Option) -> Vec { + 
log::log_memory_usage("Start"); + if let Some(r) = r_maybe { assert!( prover_state.round != 0, @@ -91,10 +94,14 @@ pub fn prove_round(prover_state: &mut ProverState, r_maybe: &Optio products_iter.fold(vec![S::zero(); degree + 1], vec_elementwise_add) ) }); - if_rayon!( + let res = if_rayon!( sums_iter.reduce(|| vec![S::zero(); degree + 1], vec_elementwise_add), sums_iter.fold(vec![S::zero(); degree + 1], vec_elementwise_add) - ) + ); + + log::log_memory_usage("End"); + + res } /// This is equivalent to diff --git a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs index 371ab8242..33d3d85f6 100644 --- a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs +++ b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs @@ -4,7 +4,7 @@ use crate::base::polynomial::CompositePolynomial; * * See third_party/license/arkworks.LICENSE */ -use crate::base::scalar::Scalar; +use crate::{base::scalar::Scalar, utils::log}; use alloc::vec::Vec; #[derive(Debug)] @@ -37,6 +37,8 @@ impl ProverState { #[tracing::instrument(name = "ProverState::create", level = "debug", skip_all)] pub fn create(polynomial: &CompositePolynomial) -> Self { + log::log_memory_usage("Start"); + assert!( polynomial.num_variables != 0, "Attempt to prove a constant." 
@@ -49,6 +51,8 @@ impl ProverState { .map(|x| x.as_ref().clone()) .collect(); + log::log_memory_usage("End"); + ProverState::new( polynomial.products.clone(), flattened_ml_extensions, diff --git a/crates/proof-of-sql/src/sql/proof/final_round_builder.rs b/crates/proof-of-sql/src/sql/proof/final_round_builder.rs index 1ce16dbd6..98094f6d9 100644 --- a/crates/proof-of-sql/src/sql/proof/final_round_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/final_round_builder.rs @@ -1,9 +1,12 @@ use super::{SumcheckSubpolynomial, SumcheckSubpolynomialTerm, SumcheckSubpolynomialType}; -use crate::base::{ - bit::BitDistribution, - commitment::{Commitment, CommittableColumn, VecCommitmentExt}, - polynomial::MultilinearExtension, - scalar::Scalar, +use crate::{ + base::{ + bit::BitDistribution, + commitment::{Commitment, CommittableColumn, VecCommitmentExt}, + polynomial::MultilinearExtension, + scalar::Scalar, + }, + utils::log, }; use alloc::{boxed::Box, vec::Vec}; @@ -95,11 +98,17 @@ impl<'a, S: Scalar> FinalRoundBuilder<'a, S> { offset_generators: usize, setup: &C::PublicSetup<'_>, ) -> Vec { - Vec::from_commitable_columns_with_offset( + log::log_memory_usage("Start"); + + let res = Vec::from_commitable_columns_with_offset( &self.commitment_descriptor, offset_generators, setup, - ) + ); + + log::log_memory_usage("End"); + + res } /// Produce a subpolynomial to be aggegated into sumcheck where the sum across binary @@ -116,10 +125,15 @@ impl<'a, S: Scalar> FinalRoundBuilder<'a, S> { skip_all )] pub fn evaluate_pcs_proof_mles(&self, evaluation_vec: &[S]) -> Vec { + log::log_memory_usage("Start"); + let mut res = Vec::with_capacity(self.pcs_proof_mles.len()); for evaluator in &self.pcs_proof_mles { res.push(evaluator.inner_product(evaluation_vec)); } + + log::log_memory_usage("End"); + res } diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index e4357a104..708b330d1 100644 --- 
a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -18,6 +18,7 @@ use crate::{ scalar::Scalar, }, proof_primitive::sumcheck::SumcheckProof, + utils::log, }; use alloc::{string::String, vec, vec::Vec}; use bumpalo::Bump; @@ -78,6 +79,8 @@ impl QueryProof { accessor: &impl DataAccessor, setup: &CP::ProverPublicSetup<'_>, ) -> (Self, OwnedTable) { + log::log_memory_usage("Start"); + let (min_row_num, max_row_num) = get_index_range(accessor, &expr.get_table_references()); let initial_range_length = max_row_num - min_row_num; let alloc = Bump::new(); @@ -212,6 +215,9 @@ impl QueryProof { subpolynomial_constraint_count, post_result_challenge_count, }; + + log::log_memory_usage("End"); + (proof, provable_result) } @@ -224,6 +230,8 @@ impl QueryProof { result: OwnedTable, setup: &CP::VerifierPublicSetup<'_>, ) -> QueryResult { + log::log_memory_usage("Start"); + let table_refs = expr.get_table_references(); let (min_row_num, _) = get_index_range(accessor, &table_refs); let num_sumcheck_variables = cmp::max(log2_up(self.range_length), 1); @@ -381,6 +389,9 @@ impl QueryProof { })?; let verification_hash = transcript.challenge_as_le(); + + log::log_memory_usage("End"); + Ok(QueryData { table: result, verification_hash, diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs index 820abdd69..6609fd3ad 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs @@ -1,11 +1,14 @@ use super::{ProofPlan, QueryData, QueryProof, QueryResult}; -use crate::base::{ - commitment::CommitmentEvaluationProof, - database::{ - ColumnField, ColumnType, CommitmentAccessor, DataAccessor, OwnedColumn, OwnedTable, +use crate::{ + base::{ + commitment::CommitmentEvaluationProof, + database::{ + ColumnField, ColumnType, CommitmentAccessor, DataAccessor, OwnedColumn, OwnedTable, + }, 
+ proof::ProofError, + scalar::Scalar, }, - proof::ProofError, - scalar::Scalar, + utils::log, }; use alloc::vec; use serde::{Deserialize, Serialize}; @@ -79,11 +82,14 @@ impl VerifiableQueryResult { /// /// This function both computes the result of a query and constructs a proof of the results /// validity. + #[tracing::instrument(name = "VerifiableQueryResult::new", level = "info", skip_all)] pub fn new( expr: &(impl ProofPlan + Serialize), accessor: &impl DataAccessor, setup: &CP::ProverPublicSetup<'_>, ) -> Self { + log::log_memory_usage("Start"); + // a query must have at least one result column; if not, it should // have been rejected at the parsing stage. @@ -100,6 +106,9 @@ impl VerifiableQueryResult { } let (proof, res) = QueryProof::new(expr, accessor, setup); + + log::log_memory_usage("End"); + Self { result: Some(res), proof: Some(proof), @@ -113,12 +122,15 @@ impl VerifiableQueryResult { /// error. /// /// Note: This does NOT transform the result! + #[tracing::instrument(name = "VerifiableQueryResult::verify", level = "info", skip_all)] pub fn verify( self, expr: &(impl ProofPlan + Serialize), accessor: &impl CommitmentAccessor, setup: &CP::VerifierPublicSetup<'_>, ) -> QueryResult { + log::log_memory_usage("Start"); + match (self.result, self.proof) { (Some(result), Some(proof)) => { let QueryData { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs index 0e10f1188..d08a6d74e 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs @@ -7,6 +7,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{FinalRoundBuilder, VerificationBuilder}, + utils::log, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -65,16 +66,22 @@ impl ProofExpr for AddSubtractExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column: Column<'a, S> = 
self.lhs.prover_evaluate(builder, alloc, table); let rhs_column: Column<'a, S> = self.rhs.prover_evaluate(builder, alloc, table); - Column::Scalar(add_subtract_columns( + let res = Column::Scalar(add_subtract_columns( lhs_column, rhs_column, self.lhs.data_type().scale().unwrap_or(0), self.rhs.data_type().scale().unwrap_or(0), alloc, self.is_subtract, - )) + )); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs index 41b15e102..1bdeeede4 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs @@ -7,6 +7,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{FinalRoundBuilder, VerificationBuilder}, + utils::log, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -45,7 +46,13 @@ impl ProofExpr for AggregateExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { - self.expr.result_evaluate(alloc, table) + log::log_memory_usage("Start"); + + let res = self.expr.result_evaluate(alloc, table); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "AggregateExpr::prover_evaluate", level = "debug", skip_all)] @@ -55,7 +62,13 @@ impl ProofExpr for AggregateExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { - self.expr.prover_evaluate(builder, alloc, table) + log::log_memory_usage("Start"); + + let res = self.expr.prover_evaluate(builder, alloc, table); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs index 0536c9068..7bfab82c0 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs @@ -7,6 +7,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + 
utils::log, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -37,11 +38,18 @@ impl ProofExpr for AndExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column: Column<'a, S> = self.lhs.result_evaluate(alloc, table); let rhs_column: Column<'a, S> = self.rhs.result_evaluate(alloc, table); let lhs = lhs_column.as_boolean().expect("lhs is not boolean"); let rhs = rhs_column.as_boolean().expect("rhs is not boolean"); - Column::Boolean(alloc.alloc_slice_fill_with(table.num_rows(), |i| lhs[i] && rhs[i])) + let res = + Column::Boolean(alloc.alloc_slice_fill_with(table.num_rows(), |i| lhs[i] && rhs[i])); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "AndExpr::prover_evaluate", level = "debug", skip_all)] @@ -51,6 +59,8 @@ impl ProofExpr for AndExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column: Column<'a, S> = self.lhs.prover_evaluate(builder, alloc, table); let rhs_column: Column<'a, S> = self.rhs.prover_evaluate(builder, alloc, table); let lhs = lhs_column.as_boolean().expect("lhs is not boolean"); @@ -70,7 +80,11 @@ impl ProofExpr for AndExpr { (-S::one(), vec![Box::new(lhs), Box::new(rhs)]), ], ); - Column::Boolean(lhs_and_rhs) + let res = Column::Boolean(lhs_and_rhs); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs index 94d1035e8..37ef02b9b 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs @@ -8,6 +8,7 @@ use crate::{ slice_ops, }, sql::proof::{FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + utils::log, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -38,13 +39,19 @@ impl ProofExpr for EqualsExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + 
log::log_memory_usage("Start"); + let lhs_column = self.lhs.result_evaluate(alloc, table); let rhs_column = self.rhs.result_evaluate(alloc, table); let lhs_scale = self.lhs.data_type().scale().unwrap_or(0); let rhs_scale = self.rhs.data_type().scale().unwrap_or(0); let res = scale_and_subtract(alloc, lhs_column, rhs_column, lhs_scale, rhs_scale, true) .expect("Failed to scale and subtract"); - Column::Boolean(result_evaluate_equals_zero(table.num_rows(), alloc, res)) + let res = Column::Boolean(result_evaluate_equals_zero(table.num_rows(), alloc, res)); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "EqualsExpr::prover_evaluate", level = "debug", skip_all)] @@ -54,18 +61,25 @@ impl ProofExpr for EqualsExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column = self.lhs.prover_evaluate(builder, alloc, table); let rhs_column = self.rhs.prover_evaluate(builder, alloc, table); let lhs_scale = self.lhs.data_type().scale().unwrap_or(0); let rhs_scale = self.rhs.data_type().scale().unwrap_or(0); - let res = scale_and_subtract(alloc, lhs_column, rhs_column, lhs_scale, rhs_scale, true) - .expect("Failed to scale and subtract"); - Column::Boolean(prover_evaluate_equals_zero( + let scale_and_subtract_res = + scale_and_subtract(alloc, lhs_column, rhs_column, lhs_scale, rhs_scale, true) + .expect("Failed to scale and subtract"); + let res = Column::Boolean(prover_evaluate_equals_zero( table.num_rows(), builder, alloc, - res, - )) + scale_and_subtract_res, + )); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs index 2f2af814a..b9d204966 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs @@ -12,6 +12,7 @@ use crate::{ scalar::Scalar, }, 
sql::proof::{FinalRoundBuilder, VerificationBuilder}, + utils::log, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -51,6 +52,8 @@ impl ProofExpr for InequalityExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column = self.lhs.result_evaluate(alloc, table); let rhs_column = self.rhs.result_evaluate(alloc, table); let lhs_scale = self.lhs.data_type().scale().unwrap_or(0); @@ -71,7 +74,11 @@ impl ProofExpr for InequalityExpr { let sign = result_evaluate_sign(table_length, alloc, diff); // (diff == 0) || (sign(diff) == -1) - Column::Boolean(result_evaluate_or(table_length, alloc, equals_zero, sign)) + let res = Column::Boolean(result_evaluate_or(table_length, alloc, equals_zero, sign)); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "InequalityExpr::prover_evaluate", level = "debug", skip_all)] @@ -81,6 +88,8 @@ impl ProofExpr for InequalityExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column = self.lhs.prover_evaluate(builder, alloc, table); let rhs_column = self.rhs.prover_evaluate(builder, alloc, table); let lhs_scale = self.lhs.data_type().scale().unwrap_or(0); @@ -106,7 +115,11 @@ impl ProofExpr for InequalityExpr { ); // (diff == 0) || (sign(diff) == -1) - Column::Boolean(prover_evaluate_or(builder, alloc, equals_zero, sign)) + let res = Column::Boolean(prover_evaluate_or(builder, alloc, equals_zero, sign)); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs index f4a5f36e4..21f5b7cb3 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs @@ -7,6 +7,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{FinalRoundBuilder, VerificationBuilder}, + utils::log, }; use bumpalo::Bump; use serde::{Deserialize, 
Serialize}; @@ -45,7 +46,13 @@ impl ProofExpr for LiteralExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { - Column::from_literal_with_length(&self.value, table.num_rows(), alloc) + log::log_memory_usage("Start"); + + let res = Column::from_literal_with_length(&self.value, table.num_rows(), alloc); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "LiteralExpr::prover_evaluate", level = "debug", skip_all)] @@ -55,8 +62,14 @@ impl ProofExpr for LiteralExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let table_length = table.num_rows(); - Column::from_literal_with_length(&self.value, table_length, alloc) + let res = Column::from_literal_with_length(&self.value, table_length, alloc); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/mod.rs b/crates/proof-of-sql/src/sql/proof_exprs/mod.rs index 73ad59d34..5143559c0 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/mod.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/mod.rs @@ -84,6 +84,3 @@ mod column_expr; pub(crate) use column_expr::ColumnExpr; #[cfg(all(test, feature = "blitzar"))] mod column_expr_test; - -#[allow(dead_code, unused_variables)] -mod range_check; diff --git a/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs index 10861c66a..867221b55 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs @@ -10,6 +10,7 @@ use crate::{ proof::{FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, proof_exprs::multiply_columns, }, + utils::log, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -57,6 +58,8 @@ impl ProofExpr for MultiplyExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column: Column<'a, S> = 
self.lhs.prover_evaluate(builder, alloc, table); let rhs_column: Column<'a, S> = self.rhs.prover_evaluate(builder, alloc, table); @@ -72,7 +75,11 @@ impl ProofExpr for MultiplyExpr { (-S::one(), vec![Box::new(lhs_column), Box::new(rhs_column)]), ], ); - Column::Scalar(lhs_times_rhs) + let res = Column::Scalar(lhs_times_rhs); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs index a85944742..ce51995b6 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs @@ -7,6 +7,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{FinalRoundBuilder, VerificationBuilder}, + utils::log, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -36,9 +37,15 @@ impl ProofExpr for NotExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let expr_column: Column<'a, S> = self.expr.result_evaluate(alloc, table); let expr = expr_column.as_boolean().expect("expr is not boolean"); - Column::Boolean(alloc.alloc_slice_fill_with(expr.len(), |i| !expr[i])) + let res = Column::Boolean(alloc.alloc_slice_fill_with(expr.len(), |i| !expr[i])); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "NotExpr::prover_evaluate", level = "debug", skip_all)] @@ -48,9 +55,15 @@ impl ProofExpr for NotExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let expr_column: Column<'a, S> = self.expr.prover_evaluate(builder, alloc, table); let expr = expr_column.as_boolean().expect("expr is not boolean"); - Column::Boolean(alloc.alloc_slice_fill_with(expr.len(), |i| !expr[i])) + let res = Column::Boolean(alloc.alloc_slice_fill_with(expr.len(), |i| !expr[i])); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs 
b/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs index 71697722f..5e0a77177 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs @@ -7,6 +7,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + utils::log, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -37,11 +38,17 @@ impl ProofExpr for OrExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column: Column<'a, S> = self.lhs.result_evaluate(alloc, table); let rhs_column: Column<'a, S> = self.rhs.result_evaluate(alloc, table); let lhs = lhs_column.as_boolean().expect("lhs is not boolean"); let rhs = rhs_column.as_boolean().expect("rhs is not boolean"); - Column::Boolean(result_evaluate_or(table.num_rows(), alloc, lhs, rhs)) + let res = Column::Boolean(result_evaluate_or(table.num_rows(), alloc, lhs, rhs)); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "OrExpr::prover_evaluate", level = "debug", skip_all)] @@ -51,11 +58,17 @@ impl ProofExpr for OrExpr { alloc: &'a Bump, table: &Table<'a, S>, ) -> Column<'a, S> { + log::log_memory_usage("Start"); + let lhs_column: Column<'a, S> = self.lhs.prover_evaluate(builder, alloc, table); let rhs_column: Column<'a, S> = self.rhs.prover_evaluate(builder, alloc, table); let lhs = lhs_column.as_boolean().expect("lhs is not boolean"); let rhs = rhs_column.as_boolean().expect("rhs is not boolean"); - Column::Boolean(prover_evaluate_or(builder, alloc, lhs, rhs)) + let res = Column::Boolean(prover_evaluate_or(builder, alloc, lhs, rhs)); + + log::log_memory_usage("End"); + + res } fn verifier_evaluate( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/range_check.rs b/crates/proof-of-sql/src/sql/proof_exprs/range_check.rs deleted file mode 100644 index 61e67cc3a..000000000 --- a/crates/proof-of-sql/src/sql/proof_exprs/range_check.rs +++ /dev/null @@ -1,314 
+0,0 @@ -use crate::base::{scalar::Scalar, slice_ops}; -use alloc::vec::Vec; -use bytemuck::cast_slice; - -// Decomposes a scalar to requisite words, additionally tracks the total -// number of occurences of each word for later use in the argument. -fn decompose_scalar_to_words<'a, S: Scalar + 'a>( - scalars: &mut [S], - word_columns: &mut [&mut [u8]], - byte_counts: &mut [u64], -) { - for (i, scalar) in scalars.iter().enumerate() { - let scalar_array: [u64; 4] = (*scalar).into(); // Convert scalar to u64 array - let scalar_bytes_full = cast_slice::(&scalar_array); // Cast u64 array to u8 slice - let scalar_bytes = &scalar_bytes_full[..31]; - - // Populate the columns of the words table with decomposition of scalar: - for (byte_index, &byte) in scalar_bytes.iter().enumerate() { - // Each column in word_columns is for a specific byte position across all scalars - word_columns[byte_index][i] = byte; - byte_counts[byte as usize] += 1; - } - } -} - -// For a word w and a verifier challenge alpha, compute -// 1 / (word + alpha), which is the modular multiplicative -// inverse of (word + alpha) in the scalar field. 
-fn get_logarithmic_derivative<'a, S: Scalar + 'a>( - byte_columns: &[&mut [u8]], - alpha: S, - inverted_word_columns: &mut [&mut [S]], -) { - // Iterate over each column - for (i, byte_column) in byte_columns.iter().enumerate() { - // Convert bytes to field elements and add alpha - let mut terms_to_invert: Vec = byte_column.iter().map(|w| S::from(w) + alpha).collect(); - - // Invert all the terms in the column at once - slice_ops::batch_inversion(&mut terms_to_invert); - - // Assign the inverted values back to the inverted_word_columns - inverted_word_columns[i].copy_from_slice(&terms_to_invert); - } -} - -#[cfg(test)] -mod tests { - use crate::{ - base::scalar::{Curve25519Scalar as S, Scalar}, - sql::proof_exprs::range_check::{decompose_scalar_to_words, get_logarithmic_derivative}, - }; - use num_traits::Inv; - - #[test] - fn we_can_decompose_small_scalars_to_words() { - let mut scalars: Vec = [1, 2, 3, 255, 256, 257].iter().map(S::from).collect(); - - let mut word_columns = vec![vec![0; scalars.len()]; 31]; - let mut word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); - let mut byte_counts = vec![0; 256]; - - decompose_scalar_to_words(&mut scalars, &mut word_slices, &mut byte_counts); - - let mut expected_word_columns = vec![vec![0; scalars.len()]; 31]; - expected_word_columns[0] = vec![1, 2, 3, 255, 0, 1]; - expected_word_columns[1] = vec![0, 0, 0, 0, 1, 1]; - // expected_word_columns[2..] is filled with 0s. - let mut expected_byte_counts = vec![0; 256]; - expected_byte_counts[0] = 31 * 6 - 7; - expected_byte_counts[1] = 4; - expected_byte_counts[2] = 1; - expected_byte_counts[3] = 1; - // expected_byte_counts[4..255] is filled with 0s. 
- expected_byte_counts[255] = 1; - - assert_eq!(word_columns, expected_word_columns); - assert_eq!(byte_counts, expected_byte_counts); - } - - #[test] - fn we_can_decompose_large_scalars_to_words() { - let mut scalars: Vec = [S::MAX_SIGNED, S::from(u64::MAX), S::from(-1)] - .iter() - .map(S::from) - .collect(); - - let mut word_columns = vec![vec![0; scalars.len()]; 31]; - let mut word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); - let mut byte_counts = vec![0; 256]; - - decompose_scalar_to_words(&mut scalars, &mut word_slices, &mut byte_counts); - - let expected_word_columns = [ - [246, 255, 236], - [233, 255, 211], - [122, 255, 245], - [46, 255, 92], - [141, 255, 26], - [49, 255, 99], - [9, 255, 18], - [44, 255, 88], - [107, 0, 214], - [206, 0, 156], - [123, 0, 247], - [81, 0, 162], - [239, 0, 222], - [124, 0, 249], - [111, 0, 222], - [10, 0, 20], - // expected_word_columns[16..] is filled with 0s. - ]; - - let mut expected_byte_counts_hardcoded = vec![0; 256]; - expected_byte_counts_hardcoded[0] = 53; - expected_byte_counts_hardcoded[9] = 1; - expected_byte_counts_hardcoded[10] = 1; - expected_byte_counts_hardcoded[18] = 1; - expected_byte_counts_hardcoded[20] = 1; - expected_byte_counts_hardcoded[26] = 1; - expected_byte_counts_hardcoded[44] = 1; - expected_byte_counts_hardcoded[46] = 1; - expected_byte_counts_hardcoded[49] = 1; - expected_byte_counts_hardcoded[81] = 1; - expected_byte_counts_hardcoded[88] = 1; - expected_byte_counts_hardcoded[92] = 1; - expected_byte_counts_hardcoded[99] = 1; - expected_byte_counts_hardcoded[107] = 1; - expected_byte_counts_hardcoded[111] = 1; - expected_byte_counts_hardcoded[122] = 1; - expected_byte_counts_hardcoded[123] = 1; - expected_byte_counts_hardcoded[124] = 1; - expected_byte_counts_hardcoded[141] = 1; - expected_byte_counts_hardcoded[156] = 1; - expected_byte_counts_hardcoded[162] = 1; - expected_byte_counts_hardcoded[206] = 1; - expected_byte_counts_hardcoded[211] = 1; - 
expected_byte_counts_hardcoded[214] = 1; - expected_byte_counts_hardcoded[222] = 2; - expected_byte_counts_hardcoded[233] = 1; - expected_byte_counts_hardcoded[236] = 1; - expected_byte_counts_hardcoded[239] = 1; - expected_byte_counts_hardcoded[245] = 1; - expected_byte_counts_hardcoded[246] = 1; - expected_byte_counts_hardcoded[247] = 1; - expected_byte_counts_hardcoded[249] = 1; - expected_byte_counts_hardcoded[255] = 8; - - assert_eq!(word_columns[..16], expected_word_columns); - assert_eq!(byte_counts, expected_byte_counts_hardcoded); - } - - #[test] - fn we_can_obtain_logarithmic_derivative_from_small_scalar() { - let scalars: Vec = [1, 2, 3, 255, 256, 257].iter().map(S::from).collect(); - let mut word_columns: Vec> = vec![vec![0; scalars.len()]; 31]; - - // Manually set the decomposed words column - word_columns[0] = [1, 2, 3, 255, 0, 1].to_vec(); - word_columns[1] = [0, 0, 0, 0, 1, 1].to_vec(); - - let word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); - - let alpha = S::from(5); - - // Initialize the inverted_word_columns_plus_alpha vector - let mut inverted_word_columns_plus_alpha: Vec> = - vec![vec![S::ZERO; scalars.len()]; 31]; - - // Convert Vec> into Vec<&mut [S]> for use in get_logarithmic_derivative - let mut word_columns_from_log_deriv: Vec<&mut [S]> = inverted_word_columns_plus_alpha - .iter_mut() - .map(Vec::as_mut_slice) - .collect(); - - get_logarithmic_derivative(&word_slices, alpha, &mut word_columns_from_log_deriv); - - let expected_data: [[u8; 6]; 31] = [ - [1, 2, 3, 255, 0, 1], - [0, 0, 0, 0, 1, 1], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 
0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - ]; - - // Invert the expected data and add the verifier challenge - let expected_columns: Vec> = expected_data - .iter() - .map(|row| { - row.iter() - .map(|&w| (S::from(w) + alpha).inv().unwrap_or(S::ZERO)) - .collect() - }) - .collect(); - - // Perform assertion for all columns at once - assert_eq!(word_columns_from_log_deriv, expected_columns); - } - - #[test] - fn we_can_obtain_logarithmic_derivative_from_large_scalar() { - let scalars: Vec = [u64::MAX, u64::MAX].iter().map(S::from).collect(); - - let mut word_columns: Vec> = vec![vec![0; scalars.len()]; 31]; - - // Manually set the decomposed words column. - // Its helpful to think of this transposed, i.e. - // Scalar 1: FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 ... - // Scalar 2: FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 ... 
- word_columns[0] = [0xFF, 0xFF].to_vec(); - word_columns[1] = [0xFF, 0xFF].to_vec(); - word_columns[2] = [0xFF, 0xFF].to_vec(); - word_columns[3] = [0xFF, 0xFF].to_vec(); - word_columns[4] = [0xFF, 0xFF].to_vec(); - word_columns[5] = [0xFF, 0xFF].to_vec(); - word_columns[6] = [0xFF, 0xFF].to_vec(); - word_columns[7] = [0xFF, 0xFF].to_vec(); - word_columns[8] = [0xFF, 0xFF].to_vec(); - word_columns[9] = [0xFF, 0xFF].to_vec(); - word_columns[10] = [0xFF, 0xFF].to_vec(); - word_columns[11] = [0xFF, 0xFF].to_vec(); - word_columns[12] = [0xFF, 0xFF].to_vec(); - word_columns[13] = [0xFF, 0xFF].to_vec(); - word_columns[14] = [0xFF, 0xFF].to_vec(); - word_columns[15] = [0xFF, 0xFF].to_vec(); - - // Simulate a verifier challenge, then prepare storage for - // 1 / (word + alpha) - let alpha = S::from(5); - let word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); - let mut inverted_word_columns_plus_alpha: Vec> = - vec![vec![S::ZERO; scalars.len()]; 31]; - // Convert Vec> into Vec<&mut [S]> for use in get_logarithmic_derivative - let mut word_columns_from_log_deriv: Vec<&mut [S]> = inverted_word_columns_plus_alpha - .iter_mut() - .map(Vec::as_mut_slice) - .collect(); - - get_logarithmic_derivative(&word_slices, alpha, &mut word_columns_from_log_deriv); - - let expected_data: [[u8; 2]; 31] = [ - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0xFF, 0xFF], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - [0, 0], - ]; - - // Invert the expected data and add the verifier challenge, producing - // columns containing 1 / (word + alpha) - let expected_columns: Vec> = expected_data - .iter() - .map(|row| { - row.iter() - .map(|&w| (S::from(w) + 
alpha).inv().unwrap_or(S::ZERO)) - .collect() - }) - .collect(); - - assert_eq!(word_columns_from_log_deriv, expected_columns); - } -} diff --git a/crates/proof-of-sql/src/sql/proof_plans/empty_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/empty_exec.rs index 1ed5bf0f3..91943113c 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/empty_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/empty_exec.rs @@ -10,6 +10,7 @@ use crate::{ sql::proof::{ FinalRoundBuilder, FirstRoundBuilder, ProofPlan, ProverEvaluate, VerificationBuilder, }, + utils::log, }; use alloc::vec::Vec; use bumpalo::Bump; @@ -70,9 +71,16 @@ impl ProverEvaluate for EmptyExec { _alloc: &'a Bump, _table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + // Create an empty table with one row - Table::<'a, S>::try_new_with_options(IndexMap::default(), TableOptions::new(Some(1))) - .unwrap() + let res = + Table::<'a, S>::try_new_with_options(IndexMap::default(), TableOptions::new(Some(1))) + .unwrap(); + + log::log_memory_usage("End"); + + res } #[tracing::instrument(name = "EmptyExec::final_round_evaluate", level = "debug", skip_all)] @@ -83,8 +91,15 @@ impl ProverEvaluate for EmptyExec { _alloc: &'a Bump, _table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + // Create an empty table with one row - Table::<'a, S>::try_new_with_options(IndexMap::default(), TableOptions::new(Some(1))) - .unwrap() + let res = + Table::<'a, S>::try_new_with_options(IndexMap::default(), TableOptions::new(Some(1))) + .unwrap(); + + log::log_memory_usage("End"); + + res } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index be8c7fd8f..5a044f737 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -17,6 +17,7 @@ use crate::{ }, proof_exprs::{AliasedDynProofExpr, DynProofExpr, ProofExpr, TableExpr}, }, + utils::log, }; 
use alloc::{boxed::Box, vec, vec::Vec}; use bumpalo::Bump; @@ -148,6 +149,8 @@ impl ProverEvaluate for FilterExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -177,6 +180,9 @@ impl ProverEvaluate for FilterExec { .expect("Failed to create table from iterator"); builder.request_post_result_challenges(2); builder.produce_one_evaluation_length(output_length); + + log::log_memory_usage("End"); + res } @@ -188,6 +194,8 @@ impl ProverEvaluate for FilterExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -226,14 +234,18 @@ impl ProverEvaluate for FilterExec { table.num_rows(), result_len, ); - Table::<'a, S>::try_from_iter_with_options( + let res = Table::<'a, S>::try_from_iter_with_options( self.aliased_results .iter() .map(|expr| expr.alias.clone()) .zip(filtered_columns), TableOptions::new(Some(output_length)), ) - .expect("Failed to create table from iterator") + .expect("Failed to create table from iterator"); + + log::log_memory_usage("End"); + + res } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index 867a52109..9ec4f5701 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -19,6 +19,7 @@ use crate::{ ProofExpr, }, }, + utils::log, }; use blitzar::proof::InnerProductProof; use bumpalo::Bump; @@ -40,6 +41,8 @@ impl ProverEvaluate for DishonestFilterExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -68,6 +71,9 @@ impl 
ProverEvaluate for DishonestFilterExec { .expect("Failed to create table from iterator"); builder.request_post_result_challenges(2); builder.produce_one_evaluation_length(output_length); + + log::log_memory_usage("End"); + res } @@ -83,6 +89,8 @@ impl ProverEvaluate for DishonestFilterExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -121,14 +129,18 @@ impl ProverEvaluate for DishonestFilterExec { table.num_rows(), result_len, ); - Table::<'a, S>::try_from_iter_with_options( + let res = Table::<'a, S>::try_from_iter_with_options( self.aliased_results .iter() .map(|expr| expr.alias.clone()) .zip(filtered_columns), TableOptions::new(Some(output_length)), ) - .expect("Failed to create table from iterator") + .expect("Failed to create table from iterator"); + + log::log_memory_usage("End"); + + res } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index b2f5de68b..086ffeceb 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -19,6 +19,7 @@ use crate::{ }, proof_exprs::{AliasedDynProofExpr, ColumnExpr, DynProofExpr, ProofExpr, TableExpr}, }, + utils::log, }; use alloc::{boxed::Box, vec, vec::Vec}; use bumpalo::Bump; @@ -196,6 +197,8 @@ impl ProverEvaluate for GroupByExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -240,6 +243,9 @@ impl ProverEvaluate for GroupByExec { .expect("Failed to create table from column references"); builder.request_post_result_challenges(2); builder.produce_one_evaluation_length(count_column.len()); + + log::log_memory_usage("End"); + res } @@ -251,6 +257,8 @@ impl ProverEvaluate for GroupByExec { alloc: &'a Bump, 
table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -312,6 +320,9 @@ impl ProverEvaluate for GroupByExec { (&group_by_result_columns, &sum_result_columns, count_column), table.num_rows(), ); + + log::log_memory_usage("End"); + res } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/mod.rs b/crates/proof-of-sql/src/sql/proof_plans/mod.rs index 3a5edb7d0..c56ee5ff7 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/mod.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/mod.rs @@ -50,3 +50,7 @@ pub use dyn_proof_plan::DynProofPlan; #[cfg(test)] mod demo_mock_plan; + +pub mod range_check; +#[cfg(all(test, feature = "blitzar"))] +pub mod range_check_test_plan; diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index 0bdbea9bc..36fd29a35 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -13,6 +13,7 @@ use crate::{ }, proof_exprs::{AliasedDynProofExpr, ProofExpr, TableExpr}, }, + utils::log, }; use alloc::vec::Vec; use bumpalo::Bump; @@ -97,10 +98,12 @@ impl ProverEvaluate for ProjectionExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); - Table::<'a, S>::try_from_iter_with_options( + let res = Table::<'a, S>::try_from_iter_with_options( self.aliased_results.iter().map(|aliased_expr| { ( aliased_expr.alias.clone(), @@ -109,7 +112,11 @@ impl ProverEvaluate for ProjectionExec { }), TableOptions::new(Some(table.num_rows())), ) - .expect("Failed to create table from iterator") + .expect("Failed to create table from iterator"); + + log::log_memory_usage("End"); + + res } #[tracing::instrument( @@ -124,6 +131,8 @@ impl ProverEvaluate for ProjectionExec { alloc: &'a Bump, 
table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + let table = table_map .get(&self.table.table_ref) .expect("Table not found"); @@ -142,6 +151,9 @@ impl ProverEvaluate for ProjectionExec { for column in res.columns().copied() { builder.produce_intermediate_mle(column); } + + log::log_memory_usage("End"); + res } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/range_check.rs b/crates/proof-of-sql/src/sql/proof_plans/range_check.rs new file mode 100644 index 000000000..931d9733f --- /dev/null +++ b/crates/proof-of-sql/src/sql/proof_plans/range_check.rs @@ -0,0 +1,694 @@ +//! Implements a cryptographic range check using logarithmic derivatives to decompose a column of scalars +//! into a matrix of words. This method leverages the properties of logarithmic derivatives to efficiently +//! verify range proofs in a zero-knowledge setting by performing word-wise decompositions, intermediate MLEs, +//! and modular inversions. +//! +//! The approach builds on the techniques outlined in the paper "Multivariate Lookups Based on Logarithmic +//! Derivatives" [ePrint 2022/1530](https://eprint.iacr.org/2022/1530.pdf), which characterizes the use of +//! logarithmic derivatives to perform multivariate lookups in cryptographic protocols. +//! +//! ## Key Steps: +//! * Word-Sized Decomposition: Each scalar is decomposed into its byte-level representation, forming a matrix where +//! each row corresponds to the decomposition of a scalar and each column corresponds to the bytes from the same position +//! across all scalars. +//! * Intermediate MLE Computation: Multi-linear extensions are computed for each word column and for the count of how +//! often each word appears. +//! * Logarithmic Derivative Calculation: After decomposing the scalars, the verifier's challenge is added to each word, +//! and the modular multiplicative inverse of this sum is computed, forming a new matrix of logarithmic derivatives. +//! 
This matrix is key to constructing range constraints. +//! +//! ## Optimization Opportunities: +//! * Batch Inversion: Inversions of large vectors are computationally expensive +//! * Parallelization: Single-threaded execution of these operations is a performance bottleneck +use crate::{ + base::{polynomial::MultilinearExtension, proof::ProofSizeMismatch, scalar::Scalar, slice_ops}, + sql::proof::{FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, +}; +use alloc::{boxed::Box, vec::Vec}; +use bumpalo::Bump; +use bytemuck::cast_slice; +use core::{cmp::max, iter::repeat}; + +/// Prove that the word-wise decompositions of a collection of scalars +/// are all within the range 0 to 2^248. +pub fn final_round_evaluate_range_check<'a, S: Scalar + 'a>( + builder: &mut FinalRoundBuilder<'a, S>, + scalars: &[S], + table_length: usize, + alloc: &'a Bump, +) { + // Create 31 columns, each will collect the corresponding word from all scalars. + // 31 because a scalar will only ever have 248 bits of data set. + let mut word_columns: Vec<&mut [u8]> = repeat(()) + .take(31) + .map(|()| alloc.alloc_slice_fill_with(scalars.len(), |_| 0)) + .collect(); + + // Allocate space for the eventual inverted word columns by copying word_columns and converting to the required type. + let mut inverted_word_columns: Vec<&mut [S]> = word_columns + .iter_mut() + .map(|column| alloc.alloc_slice_fill_with(column.len(), |_| S::ZERO)) + .collect(); + + // Initialize a vector to count occurrences of each byte (0-255). + // The vector has 256 elements padded with zeros to match the length of the word columns + // The size is the larger of 256 or the number of scalars. 
+ let word_counts: &mut [i64] = alloc.alloc_slice_fill_with(max(256, scalars.len()), |_| 0); + + decompose_scalar_to_words(scalars, &mut word_columns, word_counts); + + // Retrieve verifier challenge here, *after* Phase 1 + let alpha = builder.consume_post_result_challenge(); + + get_logarithmic_derivative( + builder, + alloc, + &mut word_columns, + alpha, + table_length, + &mut inverted_word_columns, + ); + + // Produce an MLE over the word values + prove_word_values(alloc, scalars, alpha, table_length, builder); + + // Argue that the sum of all words in each row, minus the count of each + // word multiplied by the inverted word value, is zero. + prove_row_zero_sum( + builder, + word_counts, + alloc, + scalars, + &inverted_word_columns, + alpha, + ); +} + +/// Decomposes a scalar to requisite words, additionally tracks the total +/// number of occurrences of each word for later use in the argument. +/// +/// ```text +/// | Column 0 | Column 1 | Column 2 | ... | Column 31 | +/// |------------|------------|------------|-----|-------------| +/// | w₀,₀ | w₀,₁ | w₀,₂ | ... | w₀,₃₁ | +/// | w₁,₀ | w₁,₁ | w₁,₂ | ... | w₁,₃₁ | +/// | w₂,₀ | w₂,₁ | w₂,₂ | ... | w₂,₃₁ | +/// ------------------------------------------------------------ +/// ``` +fn decompose_scalar_to_words<'a, S: Scalar + 'a>( + scalars: &[S], + word_columns: &mut [&mut [u8]], + byte_counts: &mut [i64], +) { + scalars.iter().enumerate().for_each(|(i, scalar)| { + let scalar_array: [u64; 4] = (*scalar).into(); // Convert scalar to u64 array + let scalar_bytes_full = cast_slice::(&scalar_array); // Cast u64 array to u8 slice + let scalar_bytes = &scalar_bytes_full[..31]; + + scalar_bytes + .iter() + .enumerate() + .for_each(|(byte_index, &byte)| { + word_columns[byte_index][i] = byte; + byte_counts[byte as usize] += 1; + }); + }); +} + +/// For a word w and a verifier challenge α, compute +/// wᵢⱼ , and produce an Int. MLE over this column: +/// +/// ```text +/// | Column 0 | Column 1 | Column 2 | ... 
| Column 31 | +/// |--------------|--------------|--------------|-----|--------------| +/// | w₀,₀ | w₀,₁ | w₀,₂ | ... | w₀,₃₁ | +/// | w₁,₀ | w₁,₁ | w₁,₂ | ... | w₁,₃₁ | +/// | w₂,₀ | w₂,₁ | w₂,₂ | ... | w₂,₃₁ | +/// ------------------------------------------------------------------- +/// | | | | +/// v v v v +/// Int. MLE Int. MLE Int. MLE Int. MLE +/// ``` +/// +/// Then, invert each column, producing the modular multiplicative +/// inverse of (wᵢⱼ + α), which is the logarithmic derivative +/// of wᵢⱼ + α: +/// +/// ```text +/// | Column 0 | Column 1 | Column 2 | ... | Column 31 | +/// |--------------|--------------|--------------|-----|---------------| +/// | (w₀,₀ + α)⁻¹ | (w₀,₁ + α)⁻¹ | (w₀,₂ + α)⁻¹ | ... | (w₀,₃₁ + α)⁻¹ | +/// | (w₁,₀ + α)⁻¹ | (w₁,₁ + α)⁻¹ | (w₁,₂ + α)⁻¹ | ... | (w₁,₃₁ + α)⁻¹ | +/// | (w₂,₀ + α)⁻¹ | (w₂,₁ + α)⁻¹ | (w₂,₂ + α)⁻¹ | ... | (w₂,₃₁ + α)⁻¹ | +/// -------------------------------------------------------------------- +/// | | | | +/// v v v v +/// Int. MLE Int. MLE Int. MLE Int. 
MLE +/// ``` +fn get_logarithmic_derivative<'a, S: Scalar + 'a>( + builder: &mut FinalRoundBuilder<'a, S>, + alloc: &'a Bump, + word_columns: &mut [&mut [u8]], + alpha: S, + table_length: usize, + inverted_word_columns: &mut [&mut [S]], +) { + word_columns + .iter_mut() + .zip(inverted_word_columns.iter_mut()) + .for_each(|(byte_column, inv_column)| { + // Allocate words + let words = + alloc.alloc_slice_fill_with(byte_column.len(), |j| S::from(&byte_column[j])); + + // Produce an MLE over words + builder.produce_intermediate_mle(words as &[_]); + + // Allocate words_inv + let words_inv = + alloc.alloc_slice_fill_with(byte_column.len(), |j| S::from(&byte_column[j])); + slice_ops::add_const::(words_inv, alpha); + slice_ops::batch_inversion(words_inv); + + // Copy words_inv to the corresponding inverted_word_columns[i] + builder.produce_intermediate_mle(words_inv as &[_]); + + inv_column.copy_from_slice(words_inv); + + let input_ones = alloc.alloc_slice_fill_copy(table_length, true); + + // α * (w + α)⁻¹ + w * (w + α)⁻¹ - 1 = 0 + builder.produce_sumcheck_subpolynomial( + SumcheckSubpolynomialType::Identity, + vec![ + (alpha, vec![Box::new(words_inv as &[_])]), + ( + S::one(), + vec![Box::new(words as &[_]), Box::new(words_inv as &[_])], + ), + (-S::one(), vec![Box::new(input_ones as &[_])]), + ], + ); + }); +} + +/// Produce the range of possible values that a word can take on, +/// based on the word's bit size, along with an intermediate MLE: +/// +/// ```text +/// | Column 0 | +/// |--------------------| +/// | 0 | +/// | 1 | +/// | ... | +/// | 2ⁿ - 1 | +/// ---------------------- +/// | +/// v +/// Int. MLE +/// ``` +/// Here, `n` represents the bit size of the word (e.g., for an 8-bit word, `2⁸ - 1 = 255`). +/// +/// Then, add the verifier challenge α, invert, and produce an +/// intermediate MLE: +/// +/// ```text +/// | Column 0 +/// |--------------------| +/// | (0 + α)⁻¹ | +/// | (1 + α)⁻¹ | +/// | ... 
| +/// | (2ⁿ - 1 + α)⁻¹ | +/// ---------------------- +/// | +/// v +/// Int. MLE +/// ``` +/// Finally, argue that (`word_values` + α)⁻¹ * (`word_values` + α) - 1 = 0 +/// +use alloc::vec; +#[allow(clippy::missing_panics_doc)] +#[allow(clippy::cast_possible_truncation)] +fn prove_word_values<'a, S: Scalar + 'a>( + alloc: &'a Bump, + scalars: &[S], + alpha: S, + table_length: usize, + builder: &mut FinalRoundBuilder<'a, S>, +) { + // Allocate from 0 to 255 and perturb with verifier challenge + let word_values: &mut [S] = + alloc.alloc_slice_fill_with(max(256, scalars.len()), |i| S::from(&(i as u8))); + builder.produce_intermediate_mle(word_values as &[_]); + + // Now produce an intermediate MLE over the inverted word values + verifier challenge alpha + let word_vals_inv: &mut [S] = alloc.alloc_slice_fill_with(256, |i| { + S::try_from(i.into()).expect("word value will always fit into S") + }); + + slice_ops::add_const::(word_vals_inv, alpha); + slice_ops::batch_inversion(&mut word_vals_inv[..]); + builder.produce_intermediate_mle(word_vals_inv as &[_]); + + let input_ones = alloc.alloc_slice_fill_copy(table_length, true); + + // Argument: + // (word_values + α)⁻¹ * (word_values + α) - 1 = 0 + builder.produce_sumcheck_subpolynomial( + SumcheckSubpolynomialType::Identity, + vec![ + (alpha, vec![Box::new(word_vals_inv as &[_])]), + ( + S::one(), + vec![ + Box::new(word_vals_inv as &[_]), + Box::new(word_values as &[_]), + ], + ), + (-S::one(), vec![Box::new(input_ones as &[_])]), + ], + ); +} + +/// Argue that the sum of all words in each row, minus the count of each word +/// multiplied by the inverted word value, is zero. +/// +/// ```text +/// ∑ (I₀ + I₁ + I₂ + I₃ - (C * IN)) = 0 +/// ``` +/// +/// Where: +/// - `I₀, I₁, I₂, I₃` are the inverted word columns. +/// - `C` is the count of each word. +/// - `IN` is the inverted word values column. 
+#[allow(clippy::missing_panics_doc)] +fn prove_row_zero_sum<'a, S: Scalar + 'a>( + builder: &mut FinalRoundBuilder<'a, S>, + word_counts: &'a mut [i64], + alloc: &'a Bump, + scalars: &[S], + inverted_word_columns: &[&mut [S]], + alpha: S, +) { + // Produce an MLE over the counts of each word value + builder.produce_intermediate_mle(word_counts as &[_]); + + // Allocate row_sums from the bump allocator, ensuring it lives as long as 'a + let row_sums = alloc.alloc_slice_fill_with(scalars.len(), |_| S::ZERO); + + // Sum up the corresponding row values using iterators + for column in inverted_word_columns { + column.iter().enumerate().for_each(|(i, &inv_word)| { + row_sums[i] += inv_word; + }); + } + + // Allocate and store the row sums in a Box using the bump allocator + let row_sums_box: Box<_> = + Box::new(alloc.alloc_slice_copy(row_sums) as &[_]) as Box>; + + let word_vals_plus_alpha_inv: &mut [S] = alloc.alloc_slice_fill_with(256, |i| { + S::try_from(i.into()).expect("word value will always fit into S") + }); + + slice_ops::add_const::(word_vals_plus_alpha_inv, alpha); + slice_ops::batch_inversion(&mut word_vals_plus_alpha_inv[..]); + + // Now pass the vector to the builder + builder.produce_sumcheck_subpolynomial( + SumcheckSubpolynomialType::ZeroSum, + vec![ + (S::one(), vec![row_sums_box]), + ( + -S::one(), + vec![ + Box::new(word_counts as &[_]), + Box::new(word_vals_plus_alpha_inv as &[_]), + ], + ), + ], + ); +} + +/// Verify that the prover claim is correct. +/// +/// # Panics +/// +/// if a column contains values outside of the selected range. 
+pub fn verifier_evaluate_range_check( + builder: &mut VerificationBuilder<'_, C>, + input_ones_eval: C, + input_column_eval: C, +) -> Result<(), ProofSizeMismatch> +where + C: Scalar, +{ + let alpha = builder.try_consume_post_result_challenge()?; + + let (sum, w_plus_alpha_inv_evals) = (0..31) + .map(|i| { + let w_eval = builder.try_consume_mle_evaluation()?; + let words_inv = builder.try_consume_mle_evaluation()?; + + let word_eval = words_inv * (w_eval + alpha); + let power = (0..i).fold(C::from(1), |acc, _| acc * C::from(256)); + + builder.try_produce_sumcheck_subpolynomial_evaluation( + SumcheckSubpolynomialType::Identity, + word_eval - input_ones_eval, + 2, + )?; + + Ok((w_eval, words_inv, power)) + }) + .collect::, _>>()? + .into_iter() + .fold( + (C::ZERO, Vec::with_capacity(31)), + |(sum_acc, mut invs), (w_eval, words_inv, power)| { + (sum_acc + w_eval * power, { + invs.push(words_inv); + invs + }) + }, + ); + + assert_eq!( + sum, input_column_eval, + "Range check failed, column contains values outside of the selected range" + ); + + let word_vals_eval = builder.try_consume_mle_evaluation()?; + let word_vals_plus_alpha_inv = builder.try_consume_mle_evaluation()?; + let word_value_constraint = word_vals_plus_alpha_inv * (word_vals_eval + alpha); + + builder.try_produce_sumcheck_subpolynomial_evaluation( + SumcheckSubpolynomialType::Identity, + word_value_constraint - input_ones_eval, + 2, + )?; + + let count_eval = builder.try_consume_mle_evaluation()?; + let row_sum_eval: C = w_plus_alpha_inv_evals.iter().copied().sum(); + let count_value_product_eval = count_eval * word_vals_plus_alpha_inv; + + builder.try_produce_sumcheck_subpolynomial_evaluation( + SumcheckSubpolynomialType::ZeroSum, + row_sum_eval - count_value_product_eval, + 2, + )?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + base::scalar::{Curve25519Scalar as S, Scalar}, + sql::{ + proof::FinalRoundBuilder, + proof_plans::range_check::{decompose_scalar_to_words, 
get_logarithmic_derivative}, + }, + }; + use bumpalo::Bump; + use num_traits::Inv; + + #[test] + fn we_can_decompose_small_scalars_to_words() { + let scalars: Vec = [1, 2, 3, 255, 256, 257].iter().map(S::from).collect(); + + let mut word_columns = vec![vec![0; scalars.len()]; 31]; + let mut word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); + let mut byte_counts = vec![0; 256]; + + decompose_scalar_to_words(&scalars, &mut word_slices, &mut byte_counts); + + let mut expected_word_columns = vec![vec![0; scalars.len()]; 31]; + expected_word_columns[0] = vec![1, 2, 3, 255, 0, 1]; + expected_word_columns[1] = vec![0, 0, 0, 0, 1, 1]; + // expected_word_columns[2..] is filled with 0s. + let mut expected_byte_counts = vec![0; 256]; + expected_byte_counts[0] = 31 * 6 - 7; + expected_byte_counts[1] = 4; + expected_byte_counts[2] = 1; + expected_byte_counts[3] = 1; + // expected_byte_counts[4..255] is filled with 0s. + expected_byte_counts[255] = 1; + + assert_eq!(word_columns, expected_word_columns); + assert_eq!(byte_counts, expected_byte_counts); + } + + #[test] + fn we_can_decompose_large_scalars_to_words() { + let scalars: Vec = [S::MAX_SIGNED, S::from(u64::MAX), S::from(-1)] + .iter() + .map(S::from) + .collect(); + + let mut word_columns = vec![vec![0; scalars.len()]; 31]; + let mut word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); + let mut byte_counts = vec![0; 256]; + + decompose_scalar_to_words(&scalars, &mut word_slices, &mut byte_counts); + + let expected_word_columns = [ + [246, 255, 236], + [233, 255, 211], + [122, 255, 245], + [46, 255, 92], + [141, 255, 26], + [49, 255, 99], + [9, 255, 18], + [44, 255, 88], + [107, 0, 214], + [206, 0, 156], + [123, 0, 247], + [81, 0, 162], + [239, 0, 222], + [124, 0, 249], + [111, 0, 222], + [10, 0, 20], + // expected_word_columns[16..] is filled with 0s. 
+ ]; + + let mut expected_byte_counts_hardcoded = vec![0; 256]; + expected_byte_counts_hardcoded[0] = 53; + expected_byte_counts_hardcoded[9] = 1; + expected_byte_counts_hardcoded[10] = 1; + expected_byte_counts_hardcoded[18] = 1; + expected_byte_counts_hardcoded[20] = 1; + expected_byte_counts_hardcoded[26] = 1; + expected_byte_counts_hardcoded[44] = 1; + expected_byte_counts_hardcoded[46] = 1; + expected_byte_counts_hardcoded[49] = 1; + expected_byte_counts_hardcoded[81] = 1; + expected_byte_counts_hardcoded[88] = 1; + expected_byte_counts_hardcoded[92] = 1; + expected_byte_counts_hardcoded[99] = 1; + expected_byte_counts_hardcoded[107] = 1; + expected_byte_counts_hardcoded[111] = 1; + expected_byte_counts_hardcoded[122] = 1; + expected_byte_counts_hardcoded[123] = 1; + expected_byte_counts_hardcoded[124] = 1; + expected_byte_counts_hardcoded[141] = 1; + expected_byte_counts_hardcoded[156] = 1; + expected_byte_counts_hardcoded[162] = 1; + expected_byte_counts_hardcoded[206] = 1; + expected_byte_counts_hardcoded[211] = 1; + expected_byte_counts_hardcoded[214] = 1; + expected_byte_counts_hardcoded[222] = 2; + expected_byte_counts_hardcoded[233] = 1; + expected_byte_counts_hardcoded[236] = 1; + expected_byte_counts_hardcoded[239] = 1; + expected_byte_counts_hardcoded[245] = 1; + expected_byte_counts_hardcoded[246] = 1; + expected_byte_counts_hardcoded[247] = 1; + expected_byte_counts_hardcoded[249] = 1; + expected_byte_counts_hardcoded[255] = 8; + + assert_eq!(word_columns[..16], expected_word_columns); + assert_eq!(byte_counts, expected_byte_counts_hardcoded); + } + + #[test] + fn we_can_obtain_logarithmic_derivative_from_small_scalar() { + let scalars: Vec = [1, 2, 3, 255, 256, 257].iter().map(S::from).collect(); + let mut word_columns: Vec> = vec![vec![0; scalars.len()]; 31]; + + // Manually set the decomposed words column + word_columns[0] = [1, 2, 3, 255, 0, 1].to_vec(); + word_columns[1] = [0, 0, 0, 0, 1, 1].to_vec(); + + let mut word_slices: Vec<&mut [u8]> = 
word_columns.iter_mut().map(|c| &mut c[..]).collect(); + + let alpha = S::from(5); + + // Initialize the inverted_word_columns_plus_alpha vector + let mut inverted_word_columns_plus_alpha: Vec> = + vec![vec![S::ZERO; scalars.len()]; 31]; + + // Convert Vec> into Vec<&mut [S]> for use in get_logarithmic_derivative + let mut word_columns_from_log_deriv: Vec<&mut [S]> = inverted_word_columns_plus_alpha + .iter_mut() + .map(Vec::as_mut_slice) + .collect(); + + let alloc = Bump::new(); + let mut builder = FinalRoundBuilder::new(2, Vec::new()); + + get_logarithmic_derivative( + &mut builder, + &alloc, + &mut word_slices, + alpha, + 256, + &mut word_columns_from_log_deriv, + ); + + let expected_data: [[u8; 6]; 31] = [ + [1, 2, 3, 255, 0, 1], + [0, 0, 0, 0, 1, 1], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0], + ]; + + // Invert the expected data and add the verifier challenge + let expected_columns: Vec> = expected_data + .iter() + .map(|row| { + row.iter() + .map(|&w| (S::from(w) + alpha).inv().unwrap_or(S::ZERO)) + .collect() + }) + .collect(); + + // Perform assertion for all columns at once + assert_eq!(word_columns_from_log_deriv, expected_columns); + } + + #[test] + fn we_can_obtain_logarithmic_derivative_from_large_scalar() { + let scalars: Vec = [u64::MAX, u64::MAX].iter().map(S::from).collect(); + + let mut word_columns: Vec> = vec![vec![0; scalars.len()]; 31]; + + // Manually set 
the decomposed words column. + // Its helpful to think of this transposed, i.e. + // Scalar 1: FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 ... + // Scalar 2: FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 ... + word_columns[0] = [0xFF, 0xFF].to_vec(); + word_columns[1] = [0xFF, 0xFF].to_vec(); + word_columns[2] = [0xFF, 0xFF].to_vec(); + word_columns[3] = [0xFF, 0xFF].to_vec(); + word_columns[4] = [0xFF, 0xFF].to_vec(); + word_columns[5] = [0xFF, 0xFF].to_vec(); + word_columns[6] = [0xFF, 0xFF].to_vec(); + word_columns[7] = [0xFF, 0xFF].to_vec(); + word_columns[8] = [0xFF, 0xFF].to_vec(); + word_columns[9] = [0xFF, 0xFF].to_vec(); + word_columns[10] = [0xFF, 0xFF].to_vec(); + word_columns[11] = [0xFF, 0xFF].to_vec(); + word_columns[12] = [0xFF, 0xFF].to_vec(); + word_columns[13] = [0xFF, 0xFF].to_vec(); + word_columns[14] = [0xFF, 0xFF].to_vec(); + word_columns[15] = [0xFF, 0xFF].to_vec(); + + // Simulate a verifier challenge, then prepare storage for + // 1 / (word + alpha) + let alpha = S::from(5); + let mut word_slices: Vec<&mut [u8]> = word_columns.iter_mut().map(|c| &mut c[..]).collect(); + let mut inverted_word_columns_plus_alpha: Vec> = + vec![vec![S::ZERO; scalars.len()]; 31]; + // Convert Vec> into Vec<&mut [S]> for use in get_logarithmic_derivative + let mut word_columns_from_log_deriv: Vec<&mut [S]> = inverted_word_columns_plus_alpha + .iter_mut() + .map(Vec::as_mut_slice) + .collect(); + + let alloc = Bump::new(); + let mut builder = FinalRoundBuilder::new(2, Vec::new()); + get_logarithmic_derivative( + &mut builder, + &alloc, + &mut word_slices, + alpha, + 256, + &mut word_columns_from_log_deriv, + ); + + let expected_data: [[u8; 2]; 31] = [ + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0xFF, 0xFF], + [0, 0], + [0, 0], + [0, 0], + [0, 
0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + ]; + + // Invert the expected data and add the verifier challenge, producing + // columns containing 1 / (word + alpha) + let expected_columns: Vec> = expected_data + .iter() + .map(|row| { + row.iter() + .map(|&w| (S::from(w) + alpha).inv().unwrap_or(S::ZERO)) + .collect() + }) + .collect(); + + assert_eq!(word_columns_from_log_deriv, expected_columns); + } +} diff --git a/crates/proof-of-sql/src/sql/proof_plans/range_check_test_plan.rs b/crates/proof-of-sql/src/sql/proof_plans/range_check_test_plan.rs new file mode 100644 index 000000000..f26be1fcc --- /dev/null +++ b/crates/proof-of-sql/src/sql/proof_plans/range_check_test_plan.rs @@ -0,0 +1,161 @@ +use super::range_check::{final_round_evaluate_range_check, verifier_evaluate_range_check}; +use crate::{ + base::{ + database::{ColumnField, ColumnRef, OwnedTable, Table, TableEvaluation, TableRef}, + map::{indexset, IndexMap, IndexSet}, + proof::ProofError, + scalar::Scalar, + }, + sql::proof::{ + FinalRoundBuilder, FirstRoundBuilder, ProofPlan, ProverEvaluate, VerificationBuilder, + }, +}; +use bumpalo::Bump; +use serde::Serialize; + +#[derive(Debug, Serialize)] +pub struct RangeCheckTestPlan { + pub column: ColumnRef, +} + +impl ProverEvaluate for RangeCheckTestPlan { + #[doc = " Evaluate the query, modify `FirstRoundBuilder` and return the result."] + fn first_round_evaluate<'a, S: Scalar>( + &self, + builder: &mut FirstRoundBuilder, + _alloc: &'a Bump, + table_map: &IndexMap>, + ) -> Table<'a, S> { + builder.request_post_result_challenges(1); + table_map[&self.column.table_ref()].clone() + } + + // extract data to test on from here, feed it into range check + fn final_round_evaluate<'a, S: Scalar>( + &self, + builder: &mut FinalRoundBuilder<'a, S>, + alloc: &'a Bump, + table_map: &IndexMap>, + ) -> Table<'a, S> { + // Get the table from the map using the table reference + let table: &Table<'a, S> = 
table_map + .get(&self.column.table_ref()) + .expect("Table not found"); + + let scalars = table + .inner_table() + .get(&self.column.column_id()) + .expect("Column not found in table") + .as_scalar() + .expect("Failed to convert column to scalar"); + final_round_evaluate_range_check(builder, scalars, 256, alloc); + table.clone() + } +} + +impl ProofPlan for RangeCheckTestPlan { + fn get_column_result_fields(&self) -> Vec { + vec![ColumnField::new( + self.column.column_id(), + *self.column.column_type(), + )] + } + + fn get_column_references(&self) -> IndexSet { + indexset! {self.column} + } + + #[doc = " Return all the tables referenced in the Query"] + fn get_table_references(&self) -> IndexSet { + indexset! {self.column.table_ref()} + } + + #[doc = " Form components needed to verify and proof store into `VerificationBuilder`"] + fn verifier_evaluate( + &self, + builder: &mut VerificationBuilder, + accessor: &IndexMap, + _result: Option<&OwnedTable>, + one_eval_map: &IndexMap, + ) -> Result, ProofError> { + let input_column_eval = accessor[&self.column]; + let input_ones_eval = one_eval_map[&self.column.table_ref()]; + + verifier_evaluate_range_check(builder, input_ones_eval, input_column_eval)?; + + Ok(TableEvaluation::new( + vec![accessor[&self.column]], + one_eval_map[&self.column.table_ref()], + )) + } +} + +#[cfg(all(test, feature = "blitzar"))] +mod tests { + + use crate::{ + base::database::{ + owned_table_utility::{owned_table, scalar}, + ColumnRef, ColumnType, OwnedTableTestAccessor, + }, + sql::{ + proof::VerifiableQueryResult, proof_plans::range_check_test_plan::RangeCheckTestPlan, + }, + }; + use blitzar::proof::InnerProductProof; + + #[test] + #[should_panic( + expected = "Range check failed, column contains values outside of the selected range" + )] + fn we_cannot_successfully_verify_invalid_range() { + let data = owned_table([scalar("a", -2..254)]); + let t = "sxt.t".parse().unwrap(); + let accessor = OwnedTableTestAccessor::::new_from_table(t, 
data, 0, ()); + let ast = RangeCheckTestPlan { + column: ColumnRef::new(t, "a".parse().unwrap(), ColumnType::Scalar), + }; + let verifiable_res = VerifiableQueryResult::::new(&ast, &accessor, &()); + let _ = verifiable_res.verify(&ast, &accessor, &()); + } + + #[test] + fn we_can_prove_a_range_check_with_range_0_to_256() { + let data = owned_table([scalar("a", 0..256)]); + let t = "sxt.t".parse().unwrap(); + let accessor = OwnedTableTestAccessor::::new_from_table(t, data, 0, ()); + let ast = RangeCheckTestPlan { + column: ColumnRef::new(t, "a".parse().unwrap(), ColumnType::Scalar), + }; + let verifiable_res = VerifiableQueryResult::::new(&ast, &accessor, &()); + let res: Result< + crate::sql::proof::QueryData>, + crate::sql::proof::QueryError, + > = verifiable_res.verify(&ast, &accessor, &()); + + if let Err(e) = res { + panic!("Verification failed: {e}"); + } + assert!(res.is_ok()); + } + + #[test] + fn we_can_prove_a_range_check_with_range_1000_to_1256() { + let data = owned_table([scalar("a", 1000..1256)]); + let t = "sxt.t".parse().unwrap(); + let accessor = OwnedTableTestAccessor::::new_from_table(t, data, 0, ()); + let ast = RangeCheckTestPlan { + column: ColumnRef::new(t, "a".parse().unwrap(), ColumnType::Scalar), + }; + let verifiable_res = VerifiableQueryResult::::new(&ast, &accessor, &()); + let res: Result< + crate::sql::proof::QueryData>, + crate::sql::proof::QueryError, + > = verifiable_res.verify(&ast, &accessor, &()); + + if let Err(e) = res { + panic!("Verification failed: {e}"); + } + assert!(res.is_ok()); + } +} diff --git a/crates/proof-of-sql/src/sql/proof_plans/slice_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/slice_exec.rs index 313ee7c65..c5af72444 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/slice_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/slice_exec.rs @@ -15,6 +15,7 @@ use crate::{ sql::proof::{ FinalRoundBuilder, FirstRoundBuilder, ProofPlan, ProverEvaluate, VerificationBuilder, }, + utils::log, }; use 
alloc::{boxed::Box, vec::Vec}; use bumpalo::Bump; @@ -115,6 +116,8 @@ impl ProverEvaluate for SliceExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + // 1. columns let input = self.input.first_round_evaluate(builder, alloc, table_map); let input_length = input.num_rows(); @@ -143,6 +146,9 @@ impl ProverEvaluate for SliceExec { builder.produce_one_evaluation_length(output_length); builder.produce_one_evaluation_length(offset_index); builder.produce_one_evaluation_length(max_index); + + log::log_memory_usage("End"); + res } @@ -154,6 +160,8 @@ impl ProverEvaluate for SliceExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { + log::log_memory_usage("Start"); + // 1. columns let input = self.input.final_round_evaluate(builder, alloc, table_map); let columns = input.columns().copied().collect::>(); @@ -181,13 +189,17 @@ impl ProverEvaluate for SliceExec { input.num_rows(), result_len, ); - Table::<'a, S>::try_from_iter_with_options( + let res = Table::<'a, S>::try_from_iter_with_options( self.get_column_result_fields() .into_iter() .map(|expr| expr.name()) .zip(filtered_columns), TableOptions::new(Some(output_length)), ) - .expect("Failed to create table from iterator") + .expect("Failed to create table from iterator"); + + log::log_memory_usage("End"); + + res } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/table_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/table_exec.rs index b86339afb..8220191d1 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/table_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/table_exec.rs @@ -8,6 +8,7 @@ use crate::{ sql::proof::{ FinalRoundBuilder, FirstRoundBuilder, ProofPlan, ProverEvaluate, VerificationBuilder, }, + utils::log, }; use alloc::vec::Vec; use bumpalo::Bump; @@ -79,10 +80,16 @@ impl ProverEvaluate for TableExec { _alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { - table_map + log::log_memory_usage("Start"); + + let 
first_round_table = table_map .get(&self.table_ref) .expect("Table not found") - .clone() + .clone(); + + log::log_memory_usage("End"); + + first_round_table } #[tracing::instrument(name = "TableExec::final_round_evaluate", level = "debug", skip_all)] @@ -93,9 +100,15 @@ impl ProverEvaluate for TableExec { alloc: &'a Bump, table_map: &IndexMap>, ) -> Table<'a, S> { - table_map + log::log_memory_usage("Start"); + + let final_round_table = table_map .get(&self.table_ref) .expect("Table not found") - .clone() + .clone(); + + log::log_memory_usage("End"); + + final_round_table } } diff --git a/crates/proof-of-sql/src/utils/log.rs b/crates/proof-of-sql/src/utils/log.rs new file mode 100644 index 000000000..d00e6a78f --- /dev/null +++ b/crates/proof-of-sql/src/utils/log.rs @@ -0,0 +1,30 @@ +use sysinfo::System; +use tracing::{trace, Level}; + +/// Logs the memory usage of the system at the TRACE level. +/// +/// This function logs the available memory, used memory, and the percentage of memory used. +/// It only logs this information if the TRACE level is enabled in the tracing configuration. +/// +/// # Arguments +/// +/// * `name` - A string slice that holds the name to be included in the log message. 
+#[allow(clippy::cast_precision_loss)] +pub fn log_memory_usage(name: &str) { + if tracing::level_enabled!(Level::TRACE) { + let mut system = System::new_all(); + system.refresh_memory(); + + let available_memory = system.available_memory() as f64 / (1024.0 * 1024.0); + let used_memory = system.used_memory() as f64 / (1024.0 * 1024.0); + let percentage_memory_used = (used_memory / (used_memory + available_memory)) * 100.0; + + trace!( + "{} Available memory: {:.2} MB, Used memory: {:.2} MB, Percentage memory used: {:.2}%", + name, + available_memory, + used_memory, + percentage_memory_used + ); + } +} diff --git a/crates/proof-of-sql/src/utils/mod.rs b/crates/proof-of-sql/src/utils/mod.rs index 888bd1de9..edb7840be 100644 --- a/crates/proof-of-sql/src/utils/mod.rs +++ b/crates/proof-of-sql/src/utils/mod.rs @@ -1,3 +1,6 @@ //! This module contains utilities for working with the library /// Parse DDLs and find bigdecimal columns pub mod parse; + +/// This module provides logging utilities for the library, including functions to log system memory usage. +pub mod log;