From 69e27c78b576212808bc253132131bd0654fe34b Mon Sep 17 00:00:00 2001 From: winrhcp Date: Thu, 10 Oct 2024 21:58:56 +0700 Subject: [PATCH 01/29] refactor:Tidy up scalar module (#251) # Rationale for this change MontScalar and Scalar logic is currently disorganized. We should consolidate this logic. # What changes are included in this PR? Combined logic held within the four files "mont_scalar", "mont_scalar_from", "mont_scalar_test", "mont_scalar_from_test" into the files "mont_scalar" and "mont_scalar_test" --- crates/proof-of-sql/src/base/scalar/mod.rs | 3 - .../src/base/scalar/mont_scalar.rs | 64 +++++- .../src/base/scalar/mont_scalar_from.rs | 61 ------ .../src/base/scalar/mont_scalar_from_test.rs | 183 ----------------- .../src/base/scalar/mont_scalar_test.rs | 186 +++++++++++++++++- 5 files changed, 245 insertions(+), 252 deletions(-) delete mode 100644 crates/proof-of-sql/src/base/scalar/mont_scalar_from.rs delete mode 100644 crates/proof-of-sql/src/base/scalar/mont_scalar_from_test.rs diff --git a/crates/proof-of-sql/src/base/scalar/mod.rs b/crates/proof-of-sql/src/base/scalar/mod.rs index 6ba7e8208..1759554f3 100644 --- a/crates/proof-of-sql/src/base/scalar/mod.rs +++ b/crates/proof-of-sql/src/base/scalar/mod.rs @@ -8,9 +8,6 @@ use alloc::string::String; use core::{cmp::Ordering, ops::Sub}; pub use mont_scalar::Curve25519Scalar; pub(crate) use mont_scalar::MontScalar; -mod mont_scalar_from; -#[cfg(test)] -mod mont_scalar_from_test; /// Module for a test Scalar #[cfg(test)] pub mod test_scalar; diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar.rs index 4705b3607..0a4ae8f6d 100644 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar.rs +++ b/crates/proof-of-sql/src/base/scalar/mont_scalar.rs @@ -1,6 +1,6 @@ use super::{Scalar, ScalarConversionError}; use crate::base::math::decimal::MAX_SUPPORTED_PRECISION; -use alloc::{format, vec::Vec}; +use alloc::{format, string::String, vec::Vec}; use 
ark_ff::{BigInteger, Field, Fp, Fp256, MontBackend, MontConfig, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use bytemuck::TransparentWrapper; @@ -13,7 +13,7 @@ use core::{ ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}, }; use num_bigint::BigInt; -use num_traits::Signed; +use num_traits::{Signed, Zero}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; #[derive(CanonicalSerialize, CanonicalDeserialize, TransparentWrapper)] @@ -117,6 +117,66 @@ impl> Ord for MontScalar { // end replacement for #[derive(...)] // -------------------------------------------------------------------------------- +/// TODO: add docs +macro_rules! impl_from_for_mont_scalar_for_type_supported_by_from { + ($tt:ty) => { + impl> From<$tt> for MontScalar { + fn from(x: $tt) -> Self { + Self(x.into()) + } + } + }; +} + +/// Implement `From<&[u8]>` for `MontScalar` +impl> From<&[u8]> for MontScalar { + fn from(x: &[u8]) -> Self { + if x.is_empty() { + return Self::zero(); + } + + let hash = blake3::hash(x); + let mut bytes: [u8; 32] = hash.into(); + bytes[31] &= 0b0000_1111_u8; + + Self::from_le_bytes_mod_order(&bytes) + } +} + +/// TODO: add docs +macro_rules! 
impl_from_for_mont_scalar_for_string { + ($tt:ty) => { + impl> From<$tt> for MontScalar { + fn from(x: $tt) -> Self { + x.as_bytes().into() + } + } + }; +} + +impl_from_for_mont_scalar_for_type_supported_by_from!(bool); +impl_from_for_mont_scalar_for_type_supported_by_from!(u8); +impl_from_for_mont_scalar_for_type_supported_by_from!(u16); +impl_from_for_mont_scalar_for_type_supported_by_from!(u32); +impl_from_for_mont_scalar_for_type_supported_by_from!(u64); +impl_from_for_mont_scalar_for_type_supported_by_from!(u128); +impl_from_for_mont_scalar_for_type_supported_by_from!(i8); +impl_from_for_mont_scalar_for_type_supported_by_from!(i16); +impl_from_for_mont_scalar_for_type_supported_by_from!(i32); +impl_from_for_mont_scalar_for_type_supported_by_from!(i64); +impl_from_for_mont_scalar_for_type_supported_by_from!(i128); +impl_from_for_mont_scalar_for_string!(&str); +impl_from_for_mont_scalar_for_string!(String); + +impl, T> From<&T> for MontScalar +where + T: Into> + Clone, +{ + fn from(x: &T) -> Self { + x.clone().into() + } +} + /// A wrapper type around the field element `ark_curve25519::Fr` and should be used in place of `ark_curve25519::Fr`. /// /// Using the `Scalar` trait rather than this type is encouraged to allow for easier switching of the underlying field. diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar_from.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar_from.rs deleted file mode 100644 index 7951cb871..000000000 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar_from.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::base::scalar::MontScalar; -use alloc::string::String; -use ark_ff::MontConfig; -use num_traits::Zero; - -/// TODO: add docs -macro_rules! 
impl_from_for_mont_scalar_for_type_supported_by_from { - ($tt:ty) => { - impl> From<$tt> for MontScalar { - fn from(x: $tt) -> Self { - Self(x.into()) - } - } - }; -} -impl> From<&[u8]> for MontScalar { - fn from(x: &[u8]) -> Self { - if x.is_empty() { - return Self::zero(); - } - - let hash = blake3::hash(x); - let mut bytes: [u8; 32] = hash.into(); - bytes[31] &= 0b0000_1111_u8; - - Self::from_le_bytes_mod_order(&bytes) - } -} -/// TODO: add docs -macro_rules! impl_from_for_mont_scalar_for_string { - ($tt:ty) => { - impl> From<$tt> for MontScalar { - fn from(x: $tt) -> Self { - x.as_bytes().into() - } - } - }; -} - -impl_from_for_mont_scalar_for_type_supported_by_from!(bool); -impl_from_for_mont_scalar_for_type_supported_by_from!(u8); -impl_from_for_mont_scalar_for_type_supported_by_from!(u16); -impl_from_for_mont_scalar_for_type_supported_by_from!(u32); -impl_from_for_mont_scalar_for_type_supported_by_from!(u64); -impl_from_for_mont_scalar_for_type_supported_by_from!(u128); -impl_from_for_mont_scalar_for_type_supported_by_from!(i8); -impl_from_for_mont_scalar_for_type_supported_by_from!(i16); -impl_from_for_mont_scalar_for_type_supported_by_from!(i32); -impl_from_for_mont_scalar_for_type_supported_by_from!(i64); -impl_from_for_mont_scalar_for_type_supported_by_from!(i128); -impl_from_for_mont_scalar_for_string!(&str); -impl_from_for_mont_scalar_for_string!(String); - -impl, T> From<&T> for MontScalar -where - T: Into> + Clone, -{ - fn from(x: &T) -> Self { - x.clone().into() - } -} diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar_from_test.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar_from_test.rs deleted file mode 100644 index dec43c9f9..000000000 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar_from_test.rs +++ /dev/null @@ -1,183 +0,0 @@ -use crate::base::{ - map::IndexSet, - scalar::{Curve25519Scalar, Scalar}, -}; -use alloc::{format, string::ToString, vec::Vec}; -use byte_slice_cast::AsByteSlice; -use core::cmp::Ordering; -use 
num_traits::{One, Zero}; -use rand::{ - distributions::{Distribution, Uniform}, - rngs::StdRng, - Rng, -}; -use rand_core::SeedableRng; - -#[test] -fn the_zero_integer_maps_to_the_zero_scalar() { - assert_eq!(Curve25519Scalar::from(0_u32), Curve25519Scalar::zero()); - assert_eq!(Curve25519Scalar::from(0_u64), Curve25519Scalar::zero()); - assert_eq!(Curve25519Scalar::from(0_u128), Curve25519Scalar::zero()); - assert_eq!(Curve25519Scalar::from(0_i32), Curve25519Scalar::zero()); - assert_eq!(Curve25519Scalar::from(0_i64), Curve25519Scalar::zero()); - assert_eq!(Curve25519Scalar::from(0_i128), Curve25519Scalar::zero()); -} - -#[test] -fn bools_map_to_curve25519_scalar_properly() { - assert_eq!(Curve25519Scalar::from(true), Curve25519Scalar::one()); - assert_eq!(Curve25519Scalar::from(false), Curve25519Scalar::zero()); -} - -#[test] -fn the_one_integer_maps_to_the_zero_scalar() { - assert_eq!(Curve25519Scalar::from(1_u32), Curve25519Scalar::one()); - assert_eq!(Curve25519Scalar::from(1_u64), Curve25519Scalar::one()); - assert_eq!(Curve25519Scalar::from(1_u128), Curve25519Scalar::one()); - assert_eq!(Curve25519Scalar::from(1_i32), Curve25519Scalar::one()); - assert_eq!(Curve25519Scalar::from(1_i64), Curve25519Scalar::one()); - assert_eq!(Curve25519Scalar::from(1_i128), Curve25519Scalar::one()); -} - -#[test] -fn the_zero_scalar_is_the_additive_identity() { - let mut rng = StdRng::seed_from_u64(0u64); - for _ in 0..1000 { - let a = Curve25519Scalar::from(rng.gen::()); - let b = Curve25519Scalar::from(rng.gen::()); - assert_eq!(a + b, b + a); - assert_eq!(a + Curve25519Scalar::zero(), a); - assert_eq!(b + Curve25519Scalar::zero(), b); - assert_eq!( - Curve25519Scalar::zero() + Curve25519Scalar::zero(), - Curve25519Scalar::zero() - ); - } -} - -#[test] -fn the_one_scalar_is_the_multiplicative_identity() { - let mut rng = StdRng::seed_from_u64(0u64); - for _ in 0..1000 { - let a = Curve25519Scalar::from(rng.gen::()); - let b = Curve25519Scalar::from(rng.gen::()); - 
assert_eq!(a * b, b * a); - assert_eq!(a * Curve25519Scalar::one(), a); - assert_eq!(b * Curve25519Scalar::one(), b); - assert_eq!( - Curve25519Scalar::one() * Curve25519Scalar::one(), - Curve25519Scalar::one() - ); - } -} - -#[test] -fn scalar_comparison_works() { - let zero = Curve25519Scalar::ZERO; - let one = Curve25519Scalar::ONE; - let two = Curve25519Scalar::TWO; - let max = Curve25519Scalar::MAX_SIGNED; - let min = max + one; - assert_eq!(max.signed_cmp(&one), Ordering::Greater); - assert_eq!(one.signed_cmp(&zero), Ordering::Greater); - assert_eq!(min.signed_cmp(&zero), Ordering::Less); - assert_eq!((two * max).signed_cmp(&zero), Ordering::Less); - assert_eq!(two * max + one, zero); -} - -#[test] -fn the_empty_string_will_be_mapped_to_the_zero_scalar() { - assert_eq!(Curve25519Scalar::from(""), Curve25519Scalar::zero()); - assert_eq!( - Curve25519Scalar::from(<&str>::default()), - Curve25519Scalar::zero() - ); -} - -#[test] -fn two_different_strings_map_to_different_scalars() { - let s = "abc12"; - assert_ne!(Curve25519Scalar::from(s), Curve25519Scalar::zero()); - assert_ne!(Curve25519Scalar::from(s), Curve25519Scalar::from("abc123")); -} - -#[test] -fn the_empty_buffer_will_be_mapped_to_the_zero_scalar() { - let buf = Vec::::default(); - assert_eq!(Curve25519Scalar::from(&buf[..]), Curve25519Scalar::zero()); -} - -#[test] -fn byte_arrays_with_the_same_content_but_different_types_map_to_different_scalars() { - let array = [1_u8, 2_u8, 34_u8]; - assert_ne!( - Curve25519Scalar::from(array.as_byte_slice()), - Curve25519Scalar::zero() - ); - assert_ne!( - Curve25519Scalar::from(array.as_byte_slice()), - Curve25519Scalar::from([1_u32, 2_u32, 34_u32].as_byte_slice()) - ); -} - -#[test] -fn strings_of_arbitrary_size_map_to_different_scalars() { - let mut prev_scalars = IndexSet::default(); - let mut rng = StdRng::from_seed([0u8; 32]); - let dist = Uniform::new(1, 100); - - for i in 0..100 { - let s = format!( - "{}_{}_{}", - dist.sample(&mut rng), - i, - "testing 
string to scalar".repeat(dist.sample(&mut rng)) - ); - assert!(prev_scalars.insert(Curve25519Scalar::from(s.as_str()))); - } -} - -#[test] -fn byte_arrays_of_arbitrary_size_map_to_different_scalars() { - let mut prev_scalars = IndexSet::default(); - let mut rng = StdRng::from_seed([0u8; 32]); - let dist = Uniform::new(1, 100); - - for _ in 0..100 { - let v = (0..dist.sample(&mut rng)) - .map(|_v| (dist.sample(&mut rng) % 255) as u8) - .collect::>(); - assert!(prev_scalars.insert(Curve25519Scalar::from(&v[..]))); - } -} - -#[test] -fn the_string_hash_implementation_uses_the_full_range_of_bits() { - let max_iters = 20; - let mut rng = StdRng::from_seed([0u8; 32]); - let dist = Uniform::new(1, i32::MAX); - - for i in 0..252 { - let mut curr_iters = 0; - let mut bset = IndexSet::default(); - - loop { - let s: Curve25519Scalar = dist.sample(&mut rng).to_string().as_str().into(); - let bytes = s.to_bytes_le(); //Note: this is the only spot that these tests are different from the to_curve25519_scalar tests. 
- - let is_ith_bit_set = bytes[i / 8] & (1 << (i % 8)) != 0; - - bset.insert(is_ith_bit_set); - - if bset == IndexSet::from_iter([false, true]) { - break; - } - - // this guarantees that, if the above test fails, - // we'll be able to identify it's failing - assert!(curr_iters <= max_iters); - - curr_iters += 1; - } - } -} diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs index 66fd35d61..3a26fd8c0 100644 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs +++ b/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs @@ -1,7 +1,18 @@ -use crate::base::scalar::{Curve25519Scalar, Scalar, ScalarConversionError}; -use alloc::format; +use crate::base::{ + map::IndexSet, + scalar::{Curve25519Scalar, Scalar, ScalarConversionError}, +}; +use alloc::{format, string::ToString, vec::Vec}; +use byte_slice_cast::AsByteSlice; +use core::cmp::Ordering; use num_bigint::BigInt; -use num_traits::{Inv, One}; +use num_traits::{Inv, One, Zero}; +use rand::{ + distributions::{Distribution, Uniform}, + rngs::StdRng, + Rng, +}; +use rand_core::SeedableRng; #[test] fn test_dalek_interop_1() { @@ -291,3 +302,172 @@ fn test_curve25519_scalar_from_bigint() { -Curve25519Scalar::ONE ); } + +#[test] +fn the_zero_integer_maps_to_the_zero_scalar() { + assert_eq!(Curve25519Scalar::from(0_u32), Curve25519Scalar::zero()); + assert_eq!(Curve25519Scalar::from(0_u64), Curve25519Scalar::zero()); + assert_eq!(Curve25519Scalar::from(0_u128), Curve25519Scalar::zero()); + assert_eq!(Curve25519Scalar::from(0_i32), Curve25519Scalar::zero()); + assert_eq!(Curve25519Scalar::from(0_i64), Curve25519Scalar::zero()); + assert_eq!(Curve25519Scalar::from(0_i128), Curve25519Scalar::zero()); +} + +#[test] +fn bools_map_to_curve25519_scalar_properly() { + assert_eq!(Curve25519Scalar::from(true), Curve25519Scalar::one()); + assert_eq!(Curve25519Scalar::from(false), Curve25519Scalar::zero()); +} + +#[test] +fn 
the_one_integer_maps_to_the_zero_scalar() { + assert_eq!(Curve25519Scalar::from(1_u32), Curve25519Scalar::one()); + assert_eq!(Curve25519Scalar::from(1_u64), Curve25519Scalar::one()); + assert_eq!(Curve25519Scalar::from(1_u128), Curve25519Scalar::one()); + assert_eq!(Curve25519Scalar::from(1_i32), Curve25519Scalar::one()); + assert_eq!(Curve25519Scalar::from(1_i64), Curve25519Scalar::one()); + assert_eq!(Curve25519Scalar::from(1_i128), Curve25519Scalar::one()); +} + +#[test] +fn the_zero_scalar_is_the_additive_identity() { + let mut rng = StdRng::seed_from_u64(0u64); + for _ in 0..1000 { + let a = Curve25519Scalar::from(rng.gen::()); + let b = Curve25519Scalar::from(rng.gen::()); + assert_eq!(a + b, b + a); + assert_eq!(a + Curve25519Scalar::zero(), a); + assert_eq!(b + Curve25519Scalar::zero(), b); + assert_eq!( + Curve25519Scalar::zero() + Curve25519Scalar::zero(), + Curve25519Scalar::zero() + ); + } +} + +#[test] +fn the_one_scalar_is_the_multiplicative_identity() { + let mut rng = StdRng::seed_from_u64(0u64); + for _ in 0..1000 { + let a = Curve25519Scalar::from(rng.gen::()); + let b = Curve25519Scalar::from(rng.gen::()); + assert_eq!(a * b, b * a); + assert_eq!(a * Curve25519Scalar::one(), a); + assert_eq!(b * Curve25519Scalar::one(), b); + assert_eq!( + Curve25519Scalar::one() * Curve25519Scalar::one(), + Curve25519Scalar::one() + ); + } +} + +#[test] +fn scalar_comparison_works() { + let zero = Curve25519Scalar::ZERO; + let one = Curve25519Scalar::ONE; + let two = Curve25519Scalar::TWO; + let max = Curve25519Scalar::MAX_SIGNED; + let min = max + one; + assert_eq!(max.signed_cmp(&one), Ordering::Greater); + assert_eq!(one.signed_cmp(&zero), Ordering::Greater); + assert_eq!(min.signed_cmp(&zero), Ordering::Less); + assert_eq!((two * max).signed_cmp(&zero), Ordering::Less); + assert_eq!(two * max + one, zero); +} + +#[test] +fn the_empty_string_will_be_mapped_to_the_zero_scalar() { + assert_eq!(Curve25519Scalar::from(""), Curve25519Scalar::zero()); + 
assert_eq!( + Curve25519Scalar::from(<&str>::default()), + Curve25519Scalar::zero() + ); +} + +#[test] +fn two_different_strings_map_to_different_scalars() { + let s = "abc12"; + assert_ne!(Curve25519Scalar::from(s), Curve25519Scalar::zero()); + assert_ne!(Curve25519Scalar::from(s), Curve25519Scalar::from("abc123")); +} + +#[test] +fn the_empty_buffer_will_be_mapped_to_the_zero_scalar() { + let buf = Vec::::default(); + assert_eq!(Curve25519Scalar::from(&buf[..]), Curve25519Scalar::zero()); +} + +#[test] +fn byte_arrays_with_the_same_content_but_different_types_map_to_different_scalars() { + let array = [1_u8, 2_u8, 34_u8]; + assert_ne!( + Curve25519Scalar::from(array.as_byte_slice()), + Curve25519Scalar::zero() + ); + assert_ne!( + Curve25519Scalar::from(array.as_byte_slice()), + Curve25519Scalar::from([1_u32, 2_u32, 34_u32].as_byte_slice()) + ); +} + +#[test] +fn strings_of_arbitrary_size_map_to_different_scalars() { + let mut prev_scalars = IndexSet::default(); + let mut rng = StdRng::from_seed([0u8; 32]); + let dist = Uniform::new(1, 100); + + for i in 0..100 { + let s = format!( + "{}_{}_{}", + dist.sample(&mut rng), + i, + "testing string to scalar".repeat(dist.sample(&mut rng)) + ); + assert!(prev_scalars.insert(Curve25519Scalar::from(s.as_str()))); + } +} + +#[test] +fn byte_arrays_of_arbitrary_size_map_to_different_scalars() { + let mut prev_scalars = IndexSet::default(); + let mut rng = StdRng::from_seed([0u8; 32]); + let dist = Uniform::new(1, 100); + + for _ in 0..100 { + let v = (0..dist.sample(&mut rng)) + .map(|_v| (dist.sample(&mut rng) % 255) as u8) + .collect::>(); + assert!(prev_scalars.insert(Curve25519Scalar::from(&v[..]))); + } +} + +#[test] +fn the_string_hash_implementation_uses_the_full_range_of_bits() { + let max_iters = 20; + let mut rng = StdRng::from_seed([0u8; 32]); + let dist = Uniform::new(1, i32::MAX); + + for i in 0..252 { + let mut curr_iters = 0; + let mut bset = IndexSet::default(); + + loop { + let s: Curve25519Scalar = 
dist.sample(&mut rng).to_string().as_str().into(); + let bytes = s.to_bytes_le(); //Note: this is the only spot that these tests are different from the to_curve25519_scalar tests. + + let is_ith_bit_set = bytes[i / 8] & (1 << (i % 8)) != 0; + + bset.insert(is_ith_bit_set); + + if bset == IndexSet::from_iter([false, true]) { + break; + } + + // this guarantees that, if the above test fails, + // we'll be able to identify it's failing + assert!(curr_iters <= max_iters); + + curr_iters += 1; + } + } +} From 52bfa6bfd7672de2ee1c75e9bbcbcaf572fe0e7c Mon Sep 17 00:00:00 2001 From: Trevor Lovell Date: Thu, 10 Oct 2024 00:25:20 -0600 Subject: [PATCH 02/29] feat: remove std requirement for ark-serialize implementations These implementations do not use any std functionality. The save_to_file and load_from_file utilities do, which remain behind the "std" feature. Being able to serialize and deserialize public parameters will be important for some no_std use cases. --- .../src/proof_primitive/dory/public_parameters.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/public_parameters.rs b/crates/proof-of-sql/src/proof_primitive/dory/public_parameters.rs index 604eedd75..59cf6a640 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/public_parameters.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/public_parameters.rs @@ -1,7 +1,6 @@ use super::{G1Affine, G2Affine}; use alloc::vec::Vec; use ark_ff::UniformRand; -#[cfg(feature = "std")] use ark_serialize::{ CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; @@ -98,7 +97,6 @@ impl PublicParameters { } } -#[cfg(feature = "std")] impl CanonicalSerialize for PublicParameters { fn serialize_with_mode( &self, @@ -164,7 +162,6 @@ impl CanonicalSerialize for PublicParameters { } } -#[cfg(feature = "std")] impl CanonicalDeserialize for PublicParameters { fn deserialize_with_mode( mut reader: R, @@ -207,7 +204,6 @@ impl 
CanonicalDeserialize for PublicParameters { // Remove unnecessary methods if they're not overridden } -#[cfg(feature = "std")] // Implement the Valid trait to perform validation on deserialized data impl Valid for PublicParameters { fn check(&self) -> Result<(), SerializationError> { From 61d850c6eb17c1d05ef72701ceb27b151e631d65 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Fri, 11 Oct 2024 07:31:49 +0530 Subject: [PATCH 03/29] chore: resolved some `clippy::pedantic` lints (#254) # Rationale for this change We have cargo clippy running in our CI in order to enforce code quality. In order to increase our standards, we should enable the clippy::pedantic lint group. # What changes are included in this PR? Resolved the following lint warnings `module_name_repetitions` `wildcard_imports` `unused_self` `manual_let_else` `struct_field_names` `unicode_not_nfc` `manual_string_new` `large_types_passed_by_value` # Are these changes tested? Yes. --- Cargo.toml | 8 ++++++++ .../src/intermediate_ast_tests.rs | 9 +++++++-- crates/proof-of-sql/benches/scaffold/mod.rs | 2 +- crates/proof-of-sql/examples/hello_world/main.rs | 5 ++++- .../src/base/database/owned_column_operation.rs | 14 +++++++++++++- .../src/proof_primitive/dory/dory_messages.rs | 5 +++++ .../proof_primitive/dory/extended_dory_reduce.rs | 10 ++++++++-- .../src/sql/parse/dyn_proof_expr_builder.rs | 1 + .../src/sql/parse/query_context_builder.rs | 1 + crates/proof-of-sql/src/sql/proof_exprs/mod.rs | 16 +++++++++++----- .../src/sql/proof_plans/filter_exec.rs | 11 ++++------- 11 files changed, 63 insertions(+), 19 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index b5faa541b..64113f778 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -83,3 +83,11 @@ cast_lossless = "deny" redundant_closure_for_method_calls = "deny" inconsistent_struct_constructor = "deny" default_trait_access = "deny" +module_name_repetitions = "deny" +wildcard_imports = "deny" +unused_self = "deny" +manual_let_else = "deny" +struct_field_names = 
"deny" +unicode_not_nfc = "deny" +manual_string_new = "deny" +large_types_passed_by_value = "deny" diff --git a/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs b/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs index b23e12258..dbb6805cf 100644 --- a/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs +++ b/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs @@ -5,7 +5,11 @@ use crate::{ utility::*, SelectStatement, }; -use alloc::{borrow::ToOwned, string::ToString, vec}; +use alloc::{ + borrow::ToOwned, + string::{String, ToString}, + vec, +}; // Sting parser tests #[test] @@ -26,7 +30,7 @@ fn we_can_correctly_escape_the_single_quote_character() { #[test] fn we_can_parse_empty_strings() { - assert_eq!(StringLiteralParser::new().parse("''"), Ok("".to_string())); + assert_eq!(StringLiteralParser::new().parse("''"), Ok(String::new())); } #[test] @@ -108,6 +112,7 @@ fn we_can_parse_strings_having_control_characters() { ); } +#[allow(clippy::unicode_not_nfc)] #[test] fn unnormalized_strings_should_differ() { let lhs = StringLiteralParser::new().parse("'á'").unwrap(); diff --git a/crates/proof-of-sql/benches/scaffold/mod.rs b/crates/proof-of-sql/benches/scaffold/mod.rs index c82b212bd..82e9e4fe1 100644 --- a/crates/proof-of-sql/benches/scaffold/mod.rs +++ b/crates/proof-of-sql/benches/scaffold/mod.rs @@ -73,7 +73,7 @@ pub fn jaeger_scaffold( .unwrap(); } -#[allow(dead_code)] +#[allow(dead_code, clippy::module_name_repetitions)] pub fn criterion_scaffold( c: &mut Criterion, title: &str, diff --git a/crates/proof-of-sql/examples/hello_world/main.rs b/crates/proof-of-sql/examples/hello_world/main.rs index 4e5607151..26d025707 100644 --- a/crates/proof-of-sql/examples/hello_world/main.rs +++ b/crates/proof-of-sql/examples/hello_world/main.rs @@ -2,7 +2,10 @@ use blitzar::{compute::init_backend, proof::InnerProductProof}; use proof_of_sql::{ - base::database::{owned_table_utility::*, OwnedTableTestAccessor, TestAccessor}, + base::database::{ + 
owned_table_utility::{bigint, owned_table, varchar}, + OwnedTableTestAccessor, TestAccessor, + }, sql::{parse::QueryExpr, proof::QueryProof}, }; use std::{ diff --git a/crates/proof-of-sql/src/base/database/owned_column_operation.rs b/crates/proof-of-sql/src/base/database/owned_column_operation.rs index 29a131553..1fba3f1dc 100644 --- a/crates/proof-of-sql/src/base/database/owned_column_operation.rs +++ b/crates/proof-of-sql/src/base/database/owned_column_operation.rs @@ -1,6 +1,18 @@ use super::{ColumnOperationError, ColumnOperationResult}; use crate::base::{ - database::{column_operation::*, OwnedColumn}, + database::{ + column_operation::{ + eq_decimal_columns, ge_decimal_columns, le_decimal_columns, slice_and, slice_eq, + slice_eq_with_casting, slice_ge, slice_ge_with_casting, slice_le, + slice_le_with_casting, slice_not, slice_or, try_add_decimal_columns, try_add_slices, + try_add_slices_with_casting, try_divide_decimal_columns, try_divide_slices, + try_divide_slices_left_upcast, try_divide_slices_right_upcast, + try_multiply_decimal_columns, try_multiply_slices, try_multiply_slices_with_casting, + try_subtract_decimal_columns, try_subtract_slices, try_subtract_slices_left_upcast, + try_subtract_slices_right_upcast, + }, + OwnedColumn, + }, scalar::Scalar, }; use core::ops::{Add, Div, Mul, Sub}; diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_messages.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_messages.rs index 42acfcd6e..a5f34b405 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_messages.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_messages.rs @@ -5,6 +5,7 @@ use ark_ff::Field; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use num_traits::Zero; +#[allow(clippy::struct_field_names)] #[derive(Default, Clone, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq, Debug)] /// The messages sent from the prover to the verifier in the interactive protocol. /// This is, in essence, the proof. 
@@ -51,6 +52,8 @@ impl DoryMessages { transcript.extend_canonical_serialize_as_le(&message); self.G2_messages.insert(0, message); } + + #[allow(clippy::large_types_passed_by_value)] /// Pushes a GT element from the prover onto the queue, and appends it to the transcript. pub(super) fn prover_send_GT_message(&mut self, transcript: &mut impl Transcript, message: GT) { transcript.extend_canonical_serialize_as_le(&message); @@ -102,6 +105,8 @@ impl DoryMessages { transcript.extend_canonical_serialize_as_le(&message); message } + + #[allow(clippy::unused_self)] /// This is the F message that the verifier sends to the prover. /// This message is produces as a challenge from the transcript. /// diff --git a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs index d5c17d6da..49d6fa093 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_reduce.rs @@ -1,6 +1,12 @@ use super::{ - dory_reduce_helper::*, - extended_dory_reduce_helper::*, + dory_reduce_helper::{ + dory_reduce_prove_compute_Cs, dory_reduce_prove_compute_Ds, dory_reduce_prove_fold_v_vecs, + dory_reduce_prove_mutate_v_vecs, dory_reduce_verify_update_C, dory_reduce_verify_update_Ds, + }, + extended_dory_reduce_helper::{ + extended_dory_reduce_prove_compute_E_betas, extended_dory_reduce_prove_compute_signed_Es, + extended_dory_reduce_prove_fold_s_vecs, extended_dory_reduce_verify_update_Es, + }, extended_state::{ExtendedProverState, ExtendedVerifierState}, DoryMessages, ProverSetup, VerifierSetup, }; diff --git a/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs b/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs index 692fb9fc5..0a4b44c81 100644 --- a/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs @@ -80,6 +80,7 @@ impl 
DynProofExprBuilder<'_> { ))) } + #[allow(clippy::unused_self)] fn visit_literal( &self, lit: &Literal, diff --git a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs index 819bbbec6..2c91ce6be 100644 --- a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs @@ -218,6 +218,7 @@ impl<'a> QueryContextBuilder<'a> { } } + #[allow(clippy::unused_self)] fn visit_literal(&self, literal: &Literal) -> Result { match literal { Literal::Boolean(_) => Ok(ColumnType::Boolean), diff --git a/crates/proof-of-sql/src/sql/proof_exprs/mod.rs b/crates/proof-of-sql/src/sql/proof_exprs/mod.rs index d6e16f969..d2b8c3f27 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/mod.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/mod.rs @@ -21,7 +21,10 @@ use multiply_expr::MultiplyExpr; mod multiply_expr_test; mod bitwise_verification; -use bitwise_verification::*; +use bitwise_verification::{ + is_within_acceptable_range, verify_constant_abs_decomposition, + verify_constant_sign_decomposition, +}; #[cfg(test)] mod bitwise_verification_test; @@ -39,12 +42,12 @@ use and_expr::AndExpr; mod and_expr_test; mod inequality_expr; -use inequality_expr::*; +use inequality_expr::InequalityExpr; #[cfg(all(test, feature = "blitzar"))] mod inequality_expr_test; mod or_expr; -use or_expr::*; +use or_expr::{count_or, prover_evaluate_or, result_evaluate_or, verifier_evaluate_or, OrExpr}; #[cfg(all(test, feature = "blitzar"))] mod or_expr_test; @@ -62,12 +65,15 @@ pub(crate) use numerical_util::{ }; mod equals_expr; -use equals_expr::*; +use equals_expr::{ + count_equals_zero, prover_evaluate_equals_zero, result_evaluate_equals_zero, + verifier_evaluate_equals_zero, EqualsExpr, +}; #[cfg(all(test, feature = "blitzar"))] mod equals_expr_test; mod sign_expr; -use sign_expr::*; +use sign_expr::{count_sign, prover_evaluate_sign, result_evaluate_sign, 
verifier_evaluate_sign}; #[cfg(all(test, feature = "blitzar"))] mod sign_expr_test; diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index 7c2bef519..c1c18c00f 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -240,13 +240,10 @@ fn verify_filter( ) -> Result<(), ProofError> { let one_eval = builder.mle_evaluations.one_evaluation; - let chi_eval = match builder.mle_evaluations.result_indexes_evaluation { - Some(eval) => eval, - None => { - return Err(ProofError::VerificationError { - error: "Result indexes not valid.", - }) - } + let Some(chi_eval) = builder.mle_evaluations.result_indexes_evaluation else { + return Err(ProofError::VerificationError { + error: "Result indexes not valid.", + }); }; let c_fold_eval = alpha * one_eval + fold_vals(beta, c_evals); From c378d4722e399603a0e6b83633fd9d955ebd5059 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 09:22:21 -0400 Subject: [PATCH 04/29] refactor!: remove `Indexes` --- crates/proof-of-sql/src/sql/proof/indexes.rs | 111 --------- .../src/sql/proof/indexes_test.rs | 218 ------------------ crates/proof-of-sql/src/sql/proof/mod.rs | 5 - .../src/sql/proof/proof_builder_test.rs | 9 +- .../src/sql/proof/provable_query_result.rs | 56 ++--- .../sql/proof/provable_query_result_test.rs | 203 ++++++---------- .../src/sql/proof/provable_result_column.rs | 61 +++-- .../proof-of-sql/src/sql/proof/query_proof.rs | 21 +- .../src/sql/proof/query_proof_test.rs | 10 +- .../src/sql/proof/query_result.rs | 3 - .../src/sql/proof/result_builder.rs | 7 - .../src/sql/proof/sumcheck_mle_evaluations.rs | 41 ++-- .../proof/sumcheck_mle_evaluations_test.rs | 30 +-- .../verifiable_query_result_test_utility.rs | 33 +-- .../src/sql/proof/verification_builder.rs | 2 +- .../sql/proof/verification_builder_test.rs | 6 +- 
.../src/sql/proof_exprs/equals_expr.rs | 2 +- .../src/sql/proof_exprs/inequality_expr.rs | 2 +- .../src/sql/proof_exprs/literal_expr.rs | 2 +- .../src/sql/proof_exprs/not_expr.rs | 2 +- .../src/sql/proof_exprs/sign_expr.rs | 2 +- .../src/sql/proof_exprs/sign_expr_test.rs | 6 +- .../src/sql/proof_plans/filter_exec.rs | 25 +- .../src/sql/proof_plans/filter_exec_test.rs | 8 +- .../filter_exec_test_dishonest_prover.rs | 10 +- .../src/sql/proof_plans/group_by_exec.rs | 18 +- .../src/sql/proof_plans/projection_exec.rs | 3 +- 27 files changed, 195 insertions(+), 701 deletions(-) delete mode 100644 crates/proof-of-sql/src/sql/proof/indexes.rs delete mode 100644 crates/proof-of-sql/src/sql/proof/indexes_test.rs diff --git a/crates/proof-of-sql/src/sql/proof/indexes.rs b/crates/proof-of-sql/src/sql/proof/indexes.rs deleted file mode 100644 index 4dc97470c..000000000 --- a/crates/proof-of-sql/src/sql/proof/indexes.rs +++ /dev/null @@ -1,111 +0,0 @@ -use crate::base::{polynomial::compute_truncated_lagrange_basis_sum, scalar::Scalar}; -use alloc::vec::Vec; -use core::{ops::Range, slice}; -use num_traits::Zero; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, Serialize, Deserialize)] -/// Indexes of a table for use in the [`ProvableQueryResult`](crate::sql::proof::ProvableQueryResult) -pub enum Indexes { - /// Sparse indexes. (i.e. explicitly specified indexes) - Sparse(Vec), - /// Dense indexes. (i.e. 
all indexes in a range, which means the indexes do not need to be sent to the verifier) - Dense(Range), -} - -impl Default for Indexes { - fn default() -> Self { - Self::Sparse(Vec::default()) - } -} - -impl Indexes { - /// Check if the indexes are valid for a table with n rows - pub fn valid(&self, n: usize) -> bool { - let n = n as u64; - match &self { - Self::Sparse(ix) => { - if ix.is_empty() { - return true; - } - let index = ix[0]; - if index >= n { - return false; - } - let mut prev_index = index; - for index in ix.iter().skip(1) { - if *index <= prev_index || *index >= n { - return false; - } - prev_index = *index; - } - true - } - Self::Dense(range) => range.end <= n && (range.start < range.end || range.start == 0), - } - } - /// Get an iterator over the indexes - pub fn iter(&self) -> impl Iterator + '_ { - enum Iter<'a> { - Sparse(slice::Iter<'a, u64>), - Dense(Range), - } - impl<'a> Iterator for Iter<'a> { - type Item = u64; - fn next(&mut self) -> Option { - match self { - Iter::Sparse(iter) => iter.next().copied(), - Iter::Dense(iter) => iter.next(), - } - } - } - match self { - Self::Sparse(vec) => Iter::Sparse(vec.iter()), - Self::Dense(range) => Iter::Dense(range.clone()), - } - } - /// Get the number of indexes - pub fn len(&self) -> usize { - match self { - Self::Sparse(vec) => vec.len(), - Self::Dense(range) => { - if range.end <= range.start { - 0 - } else { - (range.end - range.start) as usize - } - } - } - } - /// Check if the number of indexes is zero. - pub fn is_empty(&self) -> bool { - match self { - Self::Sparse(vec) => vec.is_empty(), - Self::Dense(range) => range.end <= range.start, - } - } - - /// Evaluates the mle that is 1 at the indexes and 0 elsewhere at the given evaluation point. - /// This returne None for Sparse indexes and the actual value for Dense indexes. 
- pub fn evaluate_at_point(&self, evaluation_point: &[S]) -> Option { - match self { - Indexes::Sparse(_) => None, - Indexes::Dense(range) => { - if range.is_empty() { - Some(Zero::zero()) - } else if range.end as usize > 2usize.pow(evaluation_point.len() as u32) { - // This only happens when the indexes are tampered with. - None - } else { - Some( - compute_truncated_lagrange_basis_sum(range.end as usize, evaluation_point) - - compute_truncated_lagrange_basis_sum( - range.start as usize, - evaluation_point, - ), - ) - } - } - } - } -} diff --git a/crates/proof-of-sql/src/sql/proof/indexes_test.rs b/crates/proof-of-sql/src/sql/proof/indexes_test.rs deleted file mode 100644 index b6a717f8d..000000000 --- a/crates/proof-of-sql/src/sql/proof/indexes_test.rs +++ /dev/null @@ -1,218 +0,0 @@ -use super::Indexes; -use crate::base::{ - polynomial::compute_evaluation_vector, - scalar::{Curve25519Scalar, MontScalar}, -}; -use num_traits::Zero; - -#[test] -fn an_empty_sparse_index_slice_is_always_valid() { - let ix = Indexes::Sparse(vec![]); - assert!(ix.valid(0)); - assert!(ix.valid(1)); -} - -#[test] -fn a_single_sparse_index_is_valid_if_within_range() { - let ix = Indexes::Sparse(vec![0]); - assert!(!ix.valid(0)); - assert!(ix.valid(1)); -} - -#[test] -fn multiple_sparse_indexes_are_valid_if_sorted_and_within_range() { - let ix = Indexes::Sparse(vec![0, 1]); - assert!(ix.valid(2)); - assert!(!ix.valid(1)); - - let ix = Indexes::Sparse(vec![1, 0]); - assert!(!ix.valid(2)); - - let ix = Indexes::Sparse(vec![0, 2, 3, 7]); - assert!(ix.valid(8)); - assert!(!ix.valid(7)); - - let ix = Indexes::Sparse(vec![0, 3, 2, 7]); - assert!(!ix.valid(8)); -} - -#[test] -fn repeated_sparse_indexes_are_invalid() { - let ix = Indexes::Sparse(vec![0, 1, 1]); - assert!(!ix.valid(2)); -} - -#[test] -fn dense_indexes_are_valid_if_within_range() { - let ix = Indexes::Dense(0..0); - assert!(ix.valid(1)); - assert!(ix.valid(0)); - - let ix = Indexes::Dense(0..1); - assert!(ix.valid(1)); - 
assert!(!ix.valid(0)); - - let ix = Indexes::Dense(0..2); - assert!(ix.valid(2)); - assert!(!ix.valid(1)); - - let ix = Indexes::Dense(1..2); - assert!(ix.valid(2)); - assert!(!ix.valid(1)); - - let ix = Indexes::Dense(2..8); - assert!(ix.valid(8)); - assert!(!ix.valid(7)); -} - -#[test] -fn empty_dense_indexes_are_invalid_if_start_and_end_are_not_0() { - let ix = Indexes::Dense(0..0); - assert!(ix.valid(10)); - assert!(ix.valid(0)); - let ix = Indexes::Dense(3..3); - assert!(!ix.valid(10)); - assert!(!ix.valid(0)); - #[allow(clippy::reversed_empty_ranges)] - let ix = Indexes::Dense(3..2); - assert!(!ix.valid(10)); - assert!(!ix.valid(0)); -} - -#[test] -fn we_can_get_the_len_of_indexes() { - let ix = Indexes::Sparse(vec![0, 1, 1]); - assert_eq!(ix.len(), 3); - - let ix = Indexes::Sparse(vec![]); - assert_eq!(ix.len(), 0); - - let ix = Indexes::Dense(0..0); - assert_eq!(ix.len(), 0); - - let ix = Indexes::Dense(0..1); - assert_eq!(ix.len(), 1); - - #[allow(clippy::reversed_empty_ranges)] - let ix = Indexes::Dense(3..2); - assert_eq!(ix.len(), 0); - - let ix = Indexes::Dense(1..2); - assert_eq!(ix.len(), 1); - - let ix = Indexes::Dense(2..8); - assert_eq!(ix.len(), 6); -} - -#[test] -fn we_can_get_the_emptiness_of_indexes() { - let ix = Indexes::Sparse(vec![0, 1, 1]); - assert!(!ix.is_empty()); - - let ix = Indexes::Sparse(vec![]); - assert!(ix.is_empty()); - - let ix = Indexes::Dense(0..0); - assert!(ix.is_empty()); - - let ix = Indexes::Dense(0..1); - assert!(!ix.is_empty()); - - #[allow(clippy::reversed_empty_ranges)] - let ix = Indexes::Dense(3..2); - assert!(ix.is_empty()); - - let ix = Indexes::Dense(1..2); - assert!(!ix.is_empty()); - - let ix = Indexes::Dense(2..8); - assert!(!ix.is_empty()); -} - -#[test] -fn we_can_calculate_the_sum_and_prod_using_iter_for_indexes() { - let ix = Indexes::Sparse(vec![0, 1, 1]); - assert_eq!(ix.iter().sum::(), 2); - assert_eq!(ix.iter().product::(), 0); - - let ix = Indexes::Sparse(vec![]); - assert_eq!(ix.iter().sum::(), 
0); - assert_eq!(ix.iter().product::(), 1); - - let ix = Indexes::Sparse(vec![2, 3, 5]); - assert_eq!(ix.iter().sum::(), 10); - assert_eq!(ix.iter().product::(), 30); - - let ix = Indexes::Dense(0..0); - assert_eq!(ix.iter().sum::(), 0); - assert_eq!(ix.iter().product::(), 1); - - let ix = Indexes::Dense(0..1); - assert_eq!(ix.iter().sum::(), 0); - assert_eq!(ix.iter().product::(), 0); - - #[allow(clippy::reversed_empty_ranges)] - let ix = Indexes::Dense(3..2); - assert_eq!(ix.iter().sum::(), 0); - assert_eq!(ix.iter().product::(), 1); - - let ix = Indexes::Dense(1..2); - assert_eq!(ix.iter().sum::(), 1); - assert_eq!(ix.iter().product::(), 1); - - let ix = Indexes::Dense(2..8); - assert_eq!(ix.iter().sum::(), 27); - assert_eq!(ix.iter().product::(), 5040); -} - -#[test] -fn we_can_evaluate_indexes_at_an_evaluation_point() { - let evaluation_point = [ - Curve25519Scalar::from(3u64), - Curve25519Scalar::from(5u64), - Curve25519Scalar::from(7u64), - ]; - let mut evaluation_vector = vec![MontScalar::default(); 8]; - compute_evaluation_vector(&mut evaluation_vector, &evaluation_point); - - let ix = Indexes::Sparse(vec![0, 1, 1]); - assert_eq!(ix.evaluate_at_point(&evaluation_point), None); - - let ix = Indexes::Sparse(vec![]); - assert_eq!(ix.evaluate_at_point(&evaluation_point), None); - - let ix = Indexes::Sparse(vec![2, 3, 5]); - assert_eq!(ix.evaluate_at_point(&evaluation_point), None); - - let ix = Indexes::Dense(0..0); - assert_eq!(ix.evaluate_at_point(&evaluation_point), Some(Zero::zero())); - - let ix = Indexes::Dense(0..1); - assert_eq!( - ix.evaluate_at_point(&evaluation_point), - Some(evaluation_vector[0]) - ); - - #[allow(clippy::reversed_empty_ranges)] - let ix = Indexes::Dense(3..2); - assert_eq!(ix.evaluate_at_point(&evaluation_point), Some(Zero::zero())); - - let ix = Indexes::Dense(1..2); - assert_eq!( - ix.evaluate_at_point(&evaluation_point), - Some(evaluation_vector[1]) - ); - - let ix = Indexes::Dense(2..8); - assert_eq!( - 
ix.evaluate_at_point(&evaluation_point), - Some( - evaluation_vector[2] - + evaluation_vector[3] - + evaluation_vector[4] - + evaluation_vector[5] - + evaluation_vector[6] - + evaluation_vector[7] - ) - ); -} diff --git a/crates/proof-of-sql/src/sql/proof/mod.rs b/crates/proof-of-sql/src/sql/proof/mod.rs index 4c993760a..c3082b476 100644 --- a/crates/proof-of-sql/src/sql/proof/mod.rs +++ b/crates/proof-of-sql/src/sql/proof/mod.rs @@ -68,10 +68,5 @@ pub(crate) use result_element_serialization::{ decode_and_convert, decode_multiple_elements, ProvableResultElement, }; -mod indexes; -pub(crate) use indexes::Indexes; -#[cfg(test)] -mod indexes_test; - mod result_builder; pub(crate) use result_builder::ResultBuilder; diff --git a/crates/proof-of-sql/src/sql/proof/proof_builder_test.rs b/crates/proof-of-sql/src/sql/proof/proof_builder_test.rs index d5d258940..788920551 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_builder_test.rs +++ b/crates/proof-of-sql/src/sql/proof/proof_builder_test.rs @@ -6,7 +6,7 @@ use crate::{ polynomial::{compute_evaluation_vector, CompositePolynomial, MultilinearExtension}, scalar::Curve25519Scalar, }, - sql::proof::{Indexes, SumcheckSubpolynomialType}, + sql::proof::SumcheckSubpolynomialType, }; use alloc::sync::Arc; #[cfg(feature = "arrow")] @@ -136,10 +136,9 @@ fn we_can_form_an_aggregated_sumcheck_polynomial() { #[cfg(feature = "arrow")] #[test] fn we_can_form_the_provable_query_result() { - let result_indexes = Indexes::Sparse(vec![1, 2]); - let col1: Column = Column::BigInt(&[10_i64, 11, 12]); - let col2: Column = Column::BigInt(&[-2_i64, -3, -4]); - let res = ProvableQueryResult::new(&result_indexes, &[col1, col2]); + let col1: Column = Column::BigInt(&[11_i64, 12]); + let col2: Column = Column::BigInt(&[-3_i64, -4]); + let res = ProvableQueryResult::new(2, &[col1, col2]); let column_fields = vec![ ColumnField::new("a".parse().unwrap(), ColumnType::BigInt), diff --git a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs 
b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs index 57d037780..95c002fe1 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs @@ -1,6 +1,4 @@ -use super::{ - decode_and_convert, decode_multiple_elements, Indexes, ProvableResultColumn, QueryError, -}; +use super::{decode_and_convert, decode_multiple_elements, ProvableResultColumn, QueryError}; use crate::base::{ database::{Column, ColumnField, ColumnType, OwnedColumn, OwnedTable}, polynomial::compute_evaluation_vector, @@ -15,7 +13,7 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct ProvableQueryResult { num_columns: u64, - indexes: Indexes, + table_length: u64, data: Vec, } @@ -25,23 +23,17 @@ impl ProvableQueryResult { pub fn num_columns(&self) -> usize { self.num_columns as usize } - /// The indexes in the result. - #[must_use] - pub fn indexes(&self) -> &Indexes { - &self.indexes - } - /// A mutable reference to a the indexes in the result. Because the struct is deserialized from untrusted data, it - /// cannot maintain any invariant on its data members; hence, this function is available to allow for easy manipulation for testing. - #[cfg(test)] - pub fn indexes_mut(&mut self) -> &mut Indexes { - &mut self.indexes - } /// A mutable reference to the number of columns in the result. Because the struct is deserialized from untrusted data, it /// cannot maintain any invariant on its data members; hence, this function is available to allow for easy manipulation for testing. #[cfg(test)] pub fn num_columns_mut(&mut self) -> &mut u64 { &mut self.num_columns } + /// The number of rows in the result + #[must_use] + pub fn table_length(&self) -> usize { + self.table_length as usize + } /// A mutable reference to the underlying encoded data of the result. 
Because the struct is deserialized from untrusted data, it /// cannot maintain any invariant on its data members; hence, this function is available to allow for easy manipulation for testing. #[cfg(test)] @@ -51,29 +43,29 @@ impl ProvableQueryResult { /// This function is available to allow for easy creation for testing. #[cfg(test)] #[must_use] - pub fn new_from_raw_data(num_columns: u64, indexes: Indexes, data: Vec) -> Self { + pub fn new_from_raw_data(num_columns: u64, table_length: u64, data: Vec) -> Self { Self { num_columns, - indexes, + table_length, data, } } /// Form intermediate query result from index rows and result columns #[must_use] - pub fn new<'a, S: Scalar>(indexes: &'a Indexes, columns: &'a [Column<'a, S>]) -> Self { + pub fn new<'a, S: Scalar>(table_length: u64, columns: &'a [Column<'a, S>]) -> Self { let mut sz = 0; for col in columns { - sz += col.num_bytes(indexes); + sz += col.num_bytes(table_length); } let mut data = vec![0u8; sz]; let mut sz = 0; for col in columns { - sz += col.write(&mut data[sz..], indexes); + sz += col.write(&mut data[sz..], table_length); } ProvableQueryResult { num_columns: columns.len() as u64, - indexes: indexes.clone(), + table_length, data, } } @@ -91,32 +83,20 @@ impl ProvableQueryResult { pub fn evaluate( &self, evaluation_point: &[S], - table_length: usize, + output_length: usize, column_result_fields: &[ColumnField], ) -> Result, QueryError> { if self.num_columns as usize != column_result_fields.len() { return Err(QueryError::InvalidColumnCount); } - - if !self.indexes.valid(table_length) { - return Err(QueryError::InvalidIndexes); - } - - let evaluation_vec_len = self - .indexes - .iter() - .max() - .map(|max| max as usize + 1) - .unwrap_or(0); - let mut evaluation_vec = vec![Zero::zero(); evaluation_vec_len]; + let mut evaluation_vec = vec![Zero::zero(); output_length]; compute_evaluation_vector(&mut evaluation_vec, evaluation_point); - let mut offset: usize = 0; let mut res = 
Vec::with_capacity(self.num_columns as usize); for field in column_result_fields { let mut val = S::zero(); - for index in self.indexes.iter() { + for entry in evaluation_vec.iter().take(output_length) { let (x, sz) = match field.data_type() { ColumnType::Boolean => decode_and_convert::(&self.data[offset..]), ColumnType::TinyInt => decode_and_convert::(&self.data[offset..]), @@ -133,7 +113,7 @@ impl ProvableQueryResult { decode_and_convert::(&self.data[offset..]) } }?; - val += evaluation_vec[index as usize] * x; + val += *entry * x; offset += sz; } res.push(val); @@ -161,7 +141,7 @@ impl ProvableQueryResult { return Err(QueryError::InvalidColumnCount); } - let n = self.indexes.len(); + let n = self.table_length(); let mut offset: usize = 0; let owned_table = OwnedTable::try_new( diff --git a/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs index ce2088ee9..693e7039e 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs @@ -1,12 +1,9 @@ use super::{ProvableQueryResult, QueryError}; -use crate::{ - base::{ - database::{Column, ColumnField, ColumnType}, - math::decimal::Precision, - polynomial::compute_evaluation_vector, - scalar::{Curve25519Scalar, Scalar}, - }, - sql::proof::Indexes, +use crate::base::{ + database::{Column, ColumnField, ColumnType}, + math::decimal::Precision, + polynomial::compute_evaluation_vector, + scalar::{Curve25519Scalar, Scalar}, }; use alloc::sync::Arc; use arrow::{ @@ -19,7 +16,7 @@ use num_traits::Zero; #[test] fn we_can_convert_an_empty_provable_result_to_a_final_result() { let cols: [Column; 1] = [Column::BigInt(&[0_i64; 0])]; - let res = ProvableQueryResult::new(&Indexes::Sparse(vec![]), &cols); + let res = ProvableQueryResult::new(0, &cols); let column_fields = vec![ColumnField::new("a1".parse().unwrap(), ColumnType::BigInt)]; let res = RecordBatch::try_from( 
res.to_owned_table::(&column_fields) @@ -38,42 +35,40 @@ fn we_can_convert_an_empty_provable_result_to_a_final_result() { #[test] fn we_can_evaluate_result_columns_as_mles() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 1] = [Column::BigInt(&[10, 11, -12])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 1] = [Column::BigInt(&[10, -12])]; + let res = ProvableQueryResult::new(2, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()]; let evals = res - .evaluate(&evaluation_point, 4, &column_fields[..]) + .evaluate(&evaluation_point, 2, &column_fields[..]) .unwrap(); #[allow(clippy::possible_missing_comma)] let expected_evals = [Curve25519Scalar::from(10u64) * evaluation_vec[0] - - Curve25519Scalar::from(12u64) * evaluation_vec[2]]; + - Curve25519Scalar::from(12u64) * evaluation_vec[1]]; assert_eq!(evals, expected_evals); } #[test] fn we_can_evaluate_result_columns_with_no_rows() { - let indexes = Indexes::Sparse(vec![]); let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let res = ProvableQueryResult::new(0, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 0]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()]; let evals = res - .evaluate(&evaluation_point, 4, &column_fields[..]) + .evaluate(&evaluation_point, 0, &column_fields[..]) .unwrap(); let expected_evals = 
[Curve25519Scalar::zero()]; assert_eq!(evals, expected_evals); @@ -81,52 +76,48 @@ fn we_can_evaluate_result_columns_with_no_rows() { #[test] fn we_can_evaluate_multiple_result_columns_as_mles() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 2] = - [Column::BigInt(&[10, 11, 12]), Column::BigInt(&[5, 7, 9])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 2] = [Column::BigInt(&[10, 12]), Column::BigInt(&[5, 9])]; + let res = ProvableQueryResult::new(2, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()]; let evals = res - .evaluate(&evaluation_point, 4, &column_fields[..]) + .evaluate(&evaluation_point, 2, &column_fields[..]) .unwrap(); let expected_evals = [ Curve25519Scalar::from(10u64) * evaluation_vec[0] - + Curve25519Scalar::from(12u64) * evaluation_vec[2], + + Curve25519Scalar::from(12u64) * evaluation_vec[1], Curve25519Scalar::from(5u64) * evaluation_vec[0] - + Curve25519Scalar::from(9u64) * evaluation_vec[2], + + Curve25519Scalar::from(9u64) * evaluation_vec[1], ]; assert_eq!(evals, expected_evals); } #[test] fn we_can_evaluate_multiple_result_columns_as_mles_with_128_bits() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 2] = - [Column::Int128(&[10, 11, 12]), Column::Int128(&[5, 7, 9])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 2] = [Column::Int128(&[10, 12]), Column::Int128(&[5, 9])]; + let res = ProvableQueryResult::new(2, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; 
compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::Int128); cols.len()]; let evals = res - .evaluate(&evaluation_point, 4, &column_fields[..]) + .evaluate(&evaluation_point, 2, &column_fields[..]) .unwrap(); let expected_evals = [ Curve25519Scalar::from(10u64) * evaluation_vec[0] - + Curve25519Scalar::from(12u64) * evaluation_vec[2], + + Curve25519Scalar::from(12u64) * evaluation_vec[1], Curve25519Scalar::from(5u64) * evaluation_vec[0] - + Curve25519Scalar::from(9u64) * evaluation_vec[2], + + Curve25519Scalar::from(9u64) * evaluation_vec[1], ]; assert_eq!(evals, expected_evals); } @@ -134,194 +125,141 @@ fn we_can_evaluate_multiple_result_columns_as_mles_with_128_bits() { #[allow(clippy::similar_names)] #[test] fn we_can_evaluate_multiple_result_columns_as_mles_with_scalar_columns() { - let indexes = Indexes::Sparse(vec![0, 2]); - let col0 = [10, 11, 12] + let col0 = [10, 12] .iter() .map(|v| Curve25519Scalar::from(*v)) .collect::>(); - let col1 = [5, 7, 9] + let col1 = [5, 9] .iter() .map(|v| Curve25519Scalar::from(*v)) .collect::>(); let cols: [Column; 2] = [Column::Scalar(&col0), Column::Scalar(&col1)]; - let res = ProvableQueryResult::new(&indexes, &cols); + let res = ProvableQueryResult::new(2, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::Scalar); cols.len()]; let evals = res - .evaluate(&evaluation_point, 4, &column_fields[..]) + .evaluate(&evaluation_point, 2, &column_fields[..]) .unwrap(); let expected_evals = [ Curve25519Scalar::from(10u64) * evaluation_vec[0] - + Curve25519Scalar::from(12u64) * evaluation_vec[2], + + Curve25519Scalar::from(12u64) * 
evaluation_vec[1], Curve25519Scalar::from(5u64) * evaluation_vec[0] - + Curve25519Scalar::from(9u64) * evaluation_vec[2], + + Curve25519Scalar::from(9u64) * evaluation_vec[1], ]; assert_eq!(evals, expected_evals); } #[test] fn we_can_evaluate_multiple_result_columns_as_mles_with_mixed_data_types() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 2] = - [Column::BigInt(&[10, 11, 12]), Column::Int128(&[5, 7, 9])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 2] = [Column::BigInt(&[10, 12]), Column::Int128(&[5, 9])]; + let res = ProvableQueryResult::new(2, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = [ ColumnField::new("a".parse().unwrap(), ColumnType::BigInt), ColumnField::new("a".parse().unwrap(), ColumnType::Int128), ]; let evals = res - .evaluate(&evaluation_point, 4, &column_fields[..]) + .evaluate(&evaluation_point, 2, &column_fields[..]) .unwrap(); let expected_evals = [ Curve25519Scalar::from(10u64) * evaluation_vec[0] - + Curve25519Scalar::from(12u64) * evaluation_vec[2], + + Curve25519Scalar::from(12u64) * evaluation_vec[1], Curve25519Scalar::from(5u64) * evaluation_vec[0] - + Curve25519Scalar::from(9u64) * evaluation_vec[2], + + Curve25519Scalar::from(9u64) * evaluation_vec[1], ]; assert_eq!(evals, expected_evals); } -#[test] -fn evaluation_fails_if_indexes_are_out_of_range() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; - let mut res = ProvableQueryResult::new(&indexes, &cols); - match res.indexes_mut() { - Indexes::Sparse(indexes) => indexes[1] = 20, - _ => panic!("unexpected indexes type"), - } - let evaluation_point = [ - Curve25519Scalar::from(10u64), - Curve25519Scalar::from(100u64), - ]; - let 
mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; - compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); - let column_fields = - vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()]; - assert!(matches!( - res.evaluate(&evaluation_point, 4, &column_fields[..]), - Err(QueryError::InvalidIndexes) - )); -} - -#[test] -fn evaluation_fails_if_indexes_are_not_sorted() { - let indexes = Indexes::Sparse(vec![1, 0]); - let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; - let res = ProvableQueryResult::new(&indexes, &cols); - let evaluation_point = [ - Curve25519Scalar::from(10u64), - Curve25519Scalar::from(100u64), - ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; - compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); - let column_fields = - vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()]; - assert!(matches!( - res.evaluate(&evaluation_point, 4, &column_fields[..]), - Err(QueryError::InvalidIndexes) - )); -} - #[test] fn evaluation_fails_if_extra_data_is_included() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; - let mut res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 1] = [Column::BigInt(&[10, 12])]; + let mut res = ProvableQueryResult::new(2, &cols); res.data_mut().push(3u8); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()]; assert!(matches!( - res.evaluate(&evaluation_point, 4, &column_fields[..]), + res.evaluate(&evaluation_point, 2, &column_fields[..]), Err(QueryError::MiscellaneousEvaluationError) )); } #[test] fn evaluation_fails_if_the_result_cant_be_decoded() { - let mut res = 
ProvableQueryResult::new_from_raw_data( - 1, - Indexes::Sparse(vec![0]), - vec![0b1111_1111_u8; 38], - ); + let mut res = ProvableQueryResult::new_from_raw_data(1, 1, vec![0b1111_1111_u8; 38]); res.data_mut()[37] = 0b0000_0001_u8; let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); res.num_columns()]; assert!(matches!( - res.evaluate(&evaluation_point, 4, &column_fields[..]), + res.evaluate(&evaluation_point, 2, &column_fields[..]), Err(QueryError::Overflow) )); } #[test] fn evaluation_fails_if_integer_overflow_happens() { - let indexes = Indexes::Sparse(vec![0, 2]); - let binding = [i64::from(i32::MAX) + 1_i64, 11, 12]; + let binding = [i64::from(i32::MAX) + 1_i64, 12]; let cols: [Column; 1] = [Column::BigInt(&binding)]; - let res = ProvableQueryResult::new(&indexes, &cols); + let res = ProvableQueryResult::new(2, &cols); let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::Int); res.num_columns()]; assert!(matches!( - res.evaluate(&evaluation_point, 4, &column_fields[..]), + res.evaluate(&evaluation_point, 2, &column_fields[..]), Err(QueryError::Overflow) )); } #[test] fn evaluation_fails_if_data_is_missing() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; - let mut res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 1] = [Column::BigInt(&[10, 12])]; + let mut res = ProvableQueryResult::new(2, &cols); 
*res.num_columns_mut() = 3; let evaluation_point = [ Curve25519Scalar::from(10u64), Curve25519Scalar::from(100u64), ]; - let mut evaluation_vec = [Curve25519Scalar::ZERO; 4]; + let mut evaluation_vec = [Curve25519Scalar::ZERO; 2]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); res.num_columns()]; assert!(matches!( - res.evaluate(&evaluation_point, 4, &column_fields[..]), + res.evaluate(&evaluation_point, 2, &column_fields[..]), Err(QueryError::Overflow) )); } #[test] fn we_can_convert_a_provable_result_to_a_final_result() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 1] = [Column::BigInt(&[10, 12])]; + let res = ProvableQueryResult::new(2, &cols); let column_fields = vec![ColumnField::new("a1".parse().unwrap(), ColumnType::BigInt)]; let res = RecordBatch::try_from( res.to_owned_table::(&column_fields) @@ -340,9 +278,8 @@ fn we_can_convert_a_provable_result_to_a_final_result() { #[test] fn we_can_convert_a_provable_result_to_a_final_result_with_128_bits() { - let indexes = Indexes::Sparse(vec![0, 2]); - let cols: [Column; 1] = [Column::Int128(&[10, 11, i128::MAX])]; - let res = ProvableQueryResult::new(&indexes, &cols); + let cols: [Column; 1] = [Column::Int128(&[10, i128::MAX])]; + let res = ProvableQueryResult::new(2, &cols); let column_fields = vec![ColumnField::new("a1".parse().unwrap(), ColumnType::Int128)]; let res = RecordBatch::try_from( res.to_owned_table::(&column_fields) @@ -368,15 +305,10 @@ fn we_can_convert_a_provable_result_to_a_final_result_with_128_bits() { #[test] fn we_can_convert_a_provable_result_to_a_final_result_with_252_bits() { - let indexes = Indexes::Sparse(vec![0, 2]); - let values = [ - Curve25519Scalar::from(10), - Curve25519Scalar::from(11), - Curve25519Scalar::MAX_SIGNED, - ]; + let values = 
[Curve25519Scalar::from(10), Curve25519Scalar::MAX_SIGNED]; let cols: [Column; 1] = [Column::Scalar(&values)]; - let res = ProvableQueryResult::new(&indexes, &cols); + let res = ProvableQueryResult::new(2, &cols); let column_fields = vec![ColumnField::new( "a1".parse().unwrap(), ColumnType::Decimal75(Precision::new(75).unwrap(), 0), @@ -406,19 +338,14 @@ fn we_can_convert_a_provable_result_to_a_final_result_with_252_bits() { #[test] fn we_can_convert_a_provable_result_to_a_final_result_with_mixed_data_types() { - let indexes = Indexes::Sparse(vec![0, 2]); - let values1: [i64; 3] = [6, 7, i64::MAX]; - let values2: [i128; 3] = [10, 11, i128::MAX]; - let values3 = ["abc", "fg", "de"]; + let values1: [i64; 2] = [6, i64::MAX]; + let values2: [i128; 2] = [10, i128::MAX]; + let values3 = ["abc", "de"]; let scalars3 = values3 .iter() .map(|v| Curve25519Scalar::from(*v)) .collect::>(); - let values4 = [ - Curve25519Scalar::from(10), - Curve25519Scalar::from(11), - Curve25519Scalar::MAX_SIGNED, - ]; + let values4 = [Curve25519Scalar::from(10), Curve25519Scalar::MAX_SIGNED]; let cols: [Column; 4] = [ Column::BigInt(&values1), @@ -426,7 +353,7 @@ fn we_can_convert_a_provable_result_to_a_final_result_with_mixed_data_types() { Column::VarChar((&values3, &scalars3)), Column::Scalar(&values4), ]; - let res = ProvableQueryResult::new(&indexes, &cols); + let res = ProvableQueryResult::new(2, &cols); let column_fields = vec![ ColumnField::new("a1".parse().unwrap(), ColumnType::BigInt), ColumnField::new("a2".parse().unwrap(), ColumnType::Int128), diff --git a/crates/proof-of-sql/src/sql/proof/provable_result_column.rs b/crates/proof-of-sql/src/sql/proof/provable_result_column.rs index bf883245b..6309f313a 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_result_column.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_result_column.rs @@ -1,4 +1,3 @@ -use super::Indexes; use crate::{ base::{database::Column, scalar::Scalar}, sql::proof::ProvableResultElement, @@ -7,24 +6,20 @@ 
use crate::{ /// Interface for serializing an intermediate result column pub trait ProvableResultColumn { /// The number of bytes of the serialized result column - fn num_bytes(&self, selection: &Indexes) -> usize; + fn num_bytes(&self, length: u64) -> usize; /// Serialize the result column - fn write(&self, out: &mut [u8], selection: &Indexes) -> usize; + fn write(&self, out: &mut [u8], length: u64) -> usize; } impl<'a, T: ProvableResultElement<'a>> ProvableResultColumn for &[T] { - fn num_bytes(&self, selection: &Indexes) -> usize { - let mut res = 0; - for i in selection.iter() { - res += self[i as usize].required_bytes(); - } - res + fn num_bytes(&self, length: u64) -> usize { + (0..length as usize).map(|i| self[i].required_bytes()).sum() } - fn write(&self, out: &mut [u8], selection: &Indexes) -> usize { + fn write(&self, out: &mut [u8], length: u64) -> usize { let mut res = 0; - for i in selection.iter() { + for i in 0..length { res += self[i as usize].encode(&mut out[res..]); } res @@ -32,39 +27,39 @@ impl<'a, T: ProvableResultElement<'a>> ProvableResultColumn for &[T] { } impl ProvableResultColumn for Column<'_, S> { - fn num_bytes(&self, selection: &Indexes) -> usize { + fn num_bytes(&self, length: u64) -> usize { match self { - Column::Boolean(col) => col.num_bytes(selection), - Column::TinyInt(col) => col.num_bytes(selection), - Column::SmallInt(col) => col.num_bytes(selection), - Column::Int(col) => col.num_bytes(selection), - Column::BigInt(col) | Column::TimestampTZ(_, _, col) => col.num_bytes(selection), - Column::Int128(col) => col.num_bytes(selection), - Column::Decimal75(_, _, col) | Column::Scalar(col) => col.num_bytes(selection), - Column::VarChar((col, _)) => col.num_bytes(selection), + Column::Boolean(col) => col.num_bytes(length), + Column::TinyInt(col) => col.num_bytes(length), + Column::SmallInt(col) => col.num_bytes(length), + Column::Int(col) => col.num_bytes(length), + Column::BigInt(col) | Column::TimestampTZ(_, _, col) => 
col.num_bytes(length), + Column::Int128(col) => col.num_bytes(length), + Column::Decimal75(_, _, col) | Column::Scalar(col) => col.num_bytes(length), + Column::VarChar((col, _)) => col.num_bytes(length), } } - fn write(&self, out: &mut [u8], selection: &Indexes) -> usize { + fn write(&self, out: &mut [u8], length: u64) -> usize { match self { - Column::Boolean(col) => col.write(out, selection), - Column::TinyInt(col) => col.write(out, selection), - Column::SmallInt(col) => col.write(out, selection), - Column::Int(col) => col.write(out, selection), - Column::BigInt(col) | Column::TimestampTZ(_, _, col) => col.write(out, selection), - Column::Int128(col) => col.write(out, selection), - Column::Decimal75(_, _, col) | Column::Scalar(col) => col.write(out, selection), - Column::VarChar((col, _)) => col.write(out, selection), + Column::Boolean(col) => col.write(out, length), + Column::TinyInt(col) => col.write(out, length), + Column::SmallInt(col) => col.write(out, length), + Column::Int(col) => col.write(out, length), + Column::BigInt(col) | Column::TimestampTZ(_, _, col) => col.write(out, length), + Column::Int128(col) => col.write(out, length), + Column::Decimal75(_, _, col) | Column::Scalar(col) => col.write(out, length), + Column::VarChar((col, _)) => col.write(out, length), } } } impl<'a, T: ProvableResultElement<'a>, const N: usize> ProvableResultColumn for [T; N] { - fn num_bytes(&self, selection: &Indexes) -> usize { - (&self[..]).num_bytes(selection) + fn num_bytes(&self, length: u64) -> usize { + (&self[..]).num_bytes(length) } - fn write(&self, out: &mut [u8], selection: &Indexes) -> usize { - (&self[..]).write(out, selection) + fn write(&self, out: &mut [u8], length: u64) -> usize { + (&self[..]).write(out, length) } } diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index dae0cd566..20a56ea27 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ 
b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -6,7 +6,7 @@ use crate::{ base::{ bit::BitDistribution, commitment::{Commitment, CommitmentEvaluationProof}, - database::{CommitmentAccessor, DataAccessor}, + database::{Column, CommitmentAccessor, DataAccessor}, math::log2_up, polynomial::{compute_evaluation_vector, CompositePolynomialInfo}, proof::{Keccak256Transcript, ProofError, Transcript}, @@ -56,7 +56,7 @@ impl QueryProof { let mut result_builder = ResultBuilder::new(table_length); let result_cols = expr.result_evaluate(&mut result_builder, &alloc, accessor); let provable_result = - ProvableQueryResult::new(&result_builder.result_index_vector, &result_cols); + ProvableQueryResult::new(&result_builder.table_length(), &result_cols); // construct a transcript for the proof let mut transcript: Keccak256Transcript = @@ -145,9 +145,10 @@ impl QueryProof { result: &ProvableQueryResult, setup: &CP::VerifierPublicSetup<'_>, ) -> QueryResult { - let table_length = expr.get_length(accessor); + let input_length = expr.get_length(accessor); + let output_length = result.table_length(); let generator_offset = expr.get_offset(accessor); - let num_sumcheck_variables = cmp::max(log2_up(table_length), 1); + let num_sumcheck_variables = cmp::max(log2_up(input_length), 1); assert!(num_sumcheck_variables > 0); // validate bit decompositions @@ -175,7 +176,7 @@ impl QueryProof { // construct a transcript for the proof let mut transcript: Keccak256Transcript = - make_transcript(expr, result, table_length, generator_offset); + make_transcript(expr, result, input_length, generator_offset); // These are the challenges that will be consumed by the proof // Specifically, these are the challenges that the verifier sends to @@ -197,7 +198,7 @@ impl QueryProof { .take(num_random_scalars) .collect(); let sumcheck_random_scalars = - SumcheckRandomScalars::new(&random_scalars, table_length, num_sumcheck_variables); + SumcheckRandomScalars::new(&random_scalars, input_length, 
num_sumcheck_variables); // verify sumcheck up to the evaluation check let poly_info = CompositePolynomialInfo { @@ -226,11 +227,11 @@ impl QueryProof { // pass over the provable AST to fill in the verification builder let sumcheck_evaluations = SumcheckMleEvaluations::new( - table_length, + input_length, + output_length, &subclaim.evaluation_point, &sumcheck_random_scalars, &self.pcs_proof_evaluations, - result.indexes(), ); let mut builder = VerificationBuilder::new( generator_offset, @@ -247,7 +248,7 @@ impl QueryProof { // compute the evaluation of the result MLEs let result_evaluations = result.evaluate( &subclaim.evaluation_point, - table_length, + output_length, &column_result_fields[..], )?; // check the evaluation of the result MLEs @@ -274,7 +275,7 @@ impl QueryProof { &product, &subclaim.evaluation_point, generator_offset as u64, - table_length, + input_length, setup, ) .map_err(|_e| ProofError::VerificationError { diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 05c0754d6..fa010e909 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -14,7 +14,7 @@ use crate::{ proof::ProofError, scalar::{Curve25519Scalar, Scalar}, }, - sql::proof::{Indexes, QueryData, ResultBuilder, SumcheckSubpolynomialType}, + sql::proof::{QueryData, ResultBuilder, SumcheckSubpolynomialType}, }; use bumpalo::Bump; use serde::Serialize; @@ -48,8 +48,6 @@ impl ProverEvaluate for TrivialTestProofPlan { _accessor: &'a dyn DataAccessor, ) -> Vec> { let col = alloc.alloc_slice_fill_copy(builder.table_length(), self.column_fill_value); - let indexes = Indexes::Sparse(vec![0u64]); - builder.set_result_indexes(indexes); vec![Column::BigInt(col)] } @@ -124,7 +122,8 @@ fn verify_a_trivial_query_proof_with_given_offset(n: usize, offset_generators: u table, } = proof.verify(&expr, &accessor, &result, &()).unwrap(); 
assert_ne!(verification_hash, [0; 32]); - let expected_result = owned_table([bigint("a1", [0])]); + let expected_col = vec![0_i64; n]; + let expected_result = owned_table([bigint("a1", expected_col)]); assert_eq!(table, expected_result); } @@ -203,7 +202,6 @@ impl ProverEvaluate for SquareTestProofPlan { alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { - builder.set_result_indexes(Indexes::Sparse(vec![0, 1])); let res: &[_] = alloc.alloc_slice_copy(&self.res); vec![Column::BigInt(res)] } @@ -384,7 +382,6 @@ impl ProverEvaluate for DoubleSquareTestProofPlan { alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { - builder.set_result_indexes(Indexes::Sparse(vec![0, 1])); let res: &[_] = alloc.alloc_slice_copy(&self.res); vec![Column::BigInt(res)] } @@ -595,7 +592,6 @@ impl ProverEvaluate for ChallengeTestProofPlan { _alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { - builder.set_result_indexes(Indexes::Sparse(vec![0, 1])); builder.request_post_result_challenges(2); vec![Column::BigInt(&[9, 25])] } diff --git a/crates/proof-of-sql/src/sql/proof/query_result.rs b/crates/proof-of-sql/src/sql/proof/query_result.rs index cdefda189..31b9ad994 100644 --- a/crates/proof-of-sql/src/sql/proof/query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/query_result.rs @@ -21,9 +21,6 @@ pub enum QueryError { /// Decoding errors other than overflow and invalid string. #[snafu(display("Miscellaneous decoding error"))] MiscellaneousDecodingError, - /// Indexes are invalid. - #[snafu(display("Invalid indexes"))] - InvalidIndexes, /// Miscellaneous evaluation error. 
#[snafu(display("Miscellaneous evaluation error"))] MiscellaneousEvaluationError, diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs b/crates/proof-of-sql/src/sql/proof/result_builder.rs index d25a190b7..e1eafa4de 100644 --- a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/result_builder.rs @@ -3,8 +3,6 @@ use super::Indexes; /// Track the result created by a query pub struct ResultBuilder { table_length: usize, - /// TODO: add docs - pub(crate) result_index_vector: Indexes, /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to @@ -28,11 +26,6 @@ impl ResultBuilder { self.table_length } - /// Set the indexes of the rows select in the result - pub fn set_result_indexes(&mut self, result_index: Indexes) { - self.result_index_vector = result_index; - } - /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to /// the prover after the prover sends the result, but before the prover diff --git a/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations.rs b/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations.rs index d03b265a2..9fabd9e13 100644 --- a/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations.rs +++ b/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations.rs @@ -1,4 +1,4 @@ -use super::{Indexes, SumcheckRandomScalars}; +use super::SumcheckRandomScalars; use crate::base::{ polynomial::{ compute_truncated_lagrange_basis_inner_product, compute_truncated_lagrange_basis_sum, @@ -10,17 +10,20 @@ use crate::base::{ #[derive(Default)] pub struct SumcheckMleEvaluations<'a, S: Scalar> { /// The length of the input table for a basic filter. When we support more complex queries, this may need to split. - pub table_length: usize, + pub input_length: usize, + /// The length of the output table for a basic filter. When we support more complex queries, this may need to split. 
+ pub output_length: usize, /// The number of sumcheck variables. pub num_sumcheck_variables: usize, /// The evaluation (at the random point generated by sumcheck) of an MLE `{x_i}` where - /// `x_i = 1` if `i < table_length;` + /// `x_i = 1` if `i < input_length;` /// = 0, otherwise - pub one_evaluation: S, + pub input_one_evaluation: S, - /// The evaluation (at the random point generated by sumcheck) of the MLE that is 1 at the result indexes and 0 elsewhere. - /// This is only computed if the result indexes are dense, and is None otherwise. - pub result_indexes_evaluation: Option, + /// The evaluation (at the random point generated by sumcheck) of an MLE `{x_i}` where + /// `x_i = 1` if `i < output_length;` + /// = 0, otherwise + pub output_one_evaluation: S, /// The evaluation (at the random point generated by sumcheck) of the MLE formed from entrywise random scalars. /// @@ -42,35 +45,35 @@ impl<'a, S: Scalar> SumcheckMleEvaluations<'a, S> { /// - `evaluation_point` - the point, outputted by sumcheck, at which to evaluate the MLEs /// - `sumcheck_random_scalars` - the random scalars used to batch the evaluations that are proven via IPA /// - `pcs_proof_evaluations` - the evaluations of the MLEs that are proven via IPA - /// - `result_indexes` - the indexes of the entries in the result columns. 
This can be sparse or dense pub fn new( - table_length: usize, + input_length: usize, + output_length: usize, evaluation_point: &[S], sumcheck_random_scalars: &SumcheckRandomScalars, pcs_proof_evaluations: &'a [S], - result_indexes: &Indexes, ) -> Self { assert_eq!( evaluation_point.len(), sumcheck_random_scalars.entrywise_point.len() ); - assert_eq!(table_length, sumcheck_random_scalars.table_length); + assert_eq!(input_length, sumcheck_random_scalars.table_length); let random_evaluation = compute_truncated_lagrange_basis_inner_product( - table_length, + input_length, evaluation_point, sumcheck_random_scalars.entrywise_point, ); - let one_evaluation = compute_truncated_lagrange_basis_sum(table_length, evaluation_point); - - let result_indexes_evaluation = result_indexes.evaluate_at_point(evaluation_point); - + let input_one_evaluation = + compute_truncated_lagrange_basis_sum(input_length, evaluation_point); + let output_one_evaluation = + compute_truncated_lagrange_basis_sum(output_length, evaluation_point); Self { - table_length, + input_length, + output_length, num_sumcheck_variables: evaluation_point.len(), - one_evaluation, + input_one_evaluation, + output_one_evaluation, random_evaluation, pcs_proof_evaluations, - result_indexes_evaluation, } } } diff --git a/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations_test.rs b/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations_test.rs index 9a603722d..5baaf54c3 100644 --- a/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations_test.rs +++ b/crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations_test.rs @@ -1,4 +1,4 @@ -use super::{Indexes, SumcheckMleEvaluations}; +use super::SumcheckMleEvaluations; use crate::{base::scalar::Curve25519Scalar, sql::proof::SumcheckRandomScalars}; use num_traits::One; @@ -14,11 +14,11 @@ fn we_can_track_the_evaluation_of_mles_used_within_sumcheck() { let pcs_proof_evaluations = [Curve25519Scalar::from(42u64)]; let evals = SumcheckMleEvaluations::new( + 3, 3, 
&evaluation_point, &sumcheck_random_scalars, &pcs_proof_evaluations, - &Indexes::Sparse(vec![]), ); let expected_eval = (Curve25519Scalar::one() - evaluation_point[0]) * (Curve25519Scalar::one() - evaluation_point[1]) @@ -38,28 +38,6 @@ fn we_can_track_the_evaluation_of_mles_used_within_sumcheck() { * (Curve25519Scalar::one() - evaluation_point[1]) + (evaluation_point[0]) * (Curve25519Scalar::one() - evaluation_point[1]) + (Curve25519Scalar::one() - evaluation_point[0]) * (evaluation_point[1]); - assert_eq!(evals.one_evaluation, expected_eval); - // Because the Indexes are sparse, this should not be evaluated. - assert_eq!(evals.result_indexes_evaluation, None); -} -#[test] -fn we_can_track_the_evaluation_of_dense_indexes() { - let evaluation_point = [Curve25519Scalar::from(3u64), Curve25519Scalar::from(5u64)]; - let random_scalars = [ - Curve25519Scalar::from(123u64), - Curve25519Scalar::from(456u64), - ]; - - let sumcheck_random_scalars = SumcheckRandomScalars::new(&random_scalars, 3, 2); - - let pcs_proof_evaluations = [Curve25519Scalar::from(42u64)]; - let evals = SumcheckMleEvaluations::new( - 3, - &evaluation_point, - &sumcheck_random_scalars, - &pcs_proof_evaluations, - &Indexes::Dense(0..3), - ); - // Because the range is the entire table, these should be the same. 
- assert_eq!(evals.result_indexes_evaluation, Some(evals.one_evaluation)); + assert_eq!(evals.input_one_evaluation, expected_eval); + assert_eq!(evals.output_one_evaluation, expected_eval); } diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs index 956c013dd..3616af825 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs @@ -2,13 +2,10 @@ use super::{ verifiable_query_result_test::EmptyTestQueryExpr, ProofPlan, ProvableQueryResult, QueryProof, VerifiableQueryResult, }; -use crate::{ - base::{ - commitment::{Commitment, CommittableColumn}, - database::{Column, CommitmentAccessor, OwnedTableTestAccessor, TableRef, TestAccessor}, - scalar::Curve25519Scalar, - }, - sql::proof::Indexes, +use crate::base::{ + commitment::{Commitment, CommittableColumn}, + database::{Column, CommitmentAccessor, OwnedTableTestAccessor, TableRef, TestAccessor}, + scalar::Curve25519Scalar, }; use blitzar::proof::InnerProductProof; use curve25519_dalek::{ristretto::RistrettoPoint, traits::Identity}; @@ -94,7 +91,7 @@ fn tamper_no_result( // add a result let mut res_p = res.clone(); let cols: [Column<'_, Curve25519Scalar>; 1] = [Column::BigInt(&[0_i64; 0])]; - res_p.provable_result = Some(ProvableQueryResult::new(&Indexes::Sparse(vec![]), &cols)); + res_p.provable_result = Some(ProvableQueryResult::new(0, &cols)); assert!(res_p.verify(expr, accessor, &()).is_err()); // add a proof @@ -117,7 +114,7 @@ fn tamper_empty_result( // try to add a result let mut res_p = res.clone(); let cols: [Column<'_, Curve25519Scalar>; 1] = [Column::BigInt(&[123_i64])]; - res_p.provable_result = Some(ProvableQueryResult::new(&Indexes::Sparse(vec![0]), &cols)); + res_p.provable_result = Some(ProvableQueryResult::new(1, &cols)); assert!(res_p.verify(expr, accessor, &()).is_err()); } @@ -126,8 
+123,6 @@ fn tamper_empty_result( /// Will panic if: /// - `res.provable_result` is `None`, which leads to calling `unwrap()` on it in the subsequent /// code and may cause an unexpected behavior. -/// - The `provable_res.indexes()` returns an empty vector, which leads to attempting to modify an -/// index of an empty result, causing an invalid state. /// - The assertion `assert!(res_p.verify(expr, accessor, &()).is_err())` fails, indicating that the /// verification did not fail as expected after tampering. fn tamper_result( @@ -140,24 +135,12 @@ fn tamper_result( return; } let provable_res = res.provable_result.as_ref().unwrap(); - if provable_res.indexes().is_empty() { + + if provable_res.table_length() == 0 { tamper_empty_result(res, expr, accessor); return; } - // try to change an index - let mut res_p = res.clone(); - let mut provable_res_p = provable_res.clone(); - match provable_res_p.indexes_mut() { - Indexes::Sparse(indexes) => indexes[0] += 1, - Indexes::Dense(range) => { - range.start += 1; - range.end += 1; - } - } - res_p.provable_result = Some(provable_res_p); - assert!(res_p.verify(expr, accessor, &()).is_err()); - // try to change data let mut res_p = res.clone(); let mut provable_res_p = provable_res.clone(); diff --git a/crates/proof-of-sql/src/sql/proof/verification_builder.rs b/crates/proof-of-sql/src/sql/proof/verification_builder.rs index 532914584..e60be0d16 100644 --- a/crates/proof-of-sql/src/sql/proof/verification_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/verification_builder.rs @@ -63,7 +63,7 @@ impl<'a, C: Commitment> VerificationBuilder<'a, C> { } pub fn table_length(&self) -> usize { - self.mle_evaluations.table_length + self.mle_evaluations.input_length } pub fn generator_offset(&self) -> usize { diff --git a/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs b/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs index acc1782d5..5d850d7ea 100644 --- 
a/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs @@ -7,7 +7,7 @@ use rand_core::OsRng; #[test] fn an_empty_sumcheck_polynomial_evaluates_to_zero() { let mle_evaluations = SumcheckMleEvaluations { - table_length: 1, + input_length: 1, num_sumcheck_variables: 1, ..Default::default() }; @@ -28,7 +28,7 @@ fn an_empty_sumcheck_polynomial_evaluates_to_zero() { #[test] fn we_build_up_a_sumcheck_polynomial_evaluation_from_subpolynomial_evaluations() { let mle_evaluations = SumcheckMleEvaluations { - table_length: 1, + input_length: 1, num_sumcheck_variables: 1, ..Default::default() }; @@ -65,7 +65,7 @@ fn we_build_up_the_folded_pcs_proof_commitment() { Curve25519Scalar::from(456u64), ]; let mle_evaluations = SumcheckMleEvaluations { - table_length: 1, + input_length: 1, num_sumcheck_variables: 1, pcs_proof_evaluations: &pcs_proof_evaluations, ..Default::default() diff --git a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs index a845076fd..faf9af5e1 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs @@ -152,7 +152,7 @@ pub fn verifier_evaluate_equals_zero( // consume mle evaluations let lhs_pseudo_inv_eval = builder.consume_intermediate_mle(); let selection_not_eval = builder.consume_intermediate_mle(); - let selection_eval = builder.mle_evaluations.one_evaluation - selection_not_eval; + let selection_eval = builder.mle_evaluations.input_one_evaluation - selection_not_eval; // subpolynomial: selection * lhs builder.produce_sumcheck_subpolynomial_evaluation( diff --git a/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs index 099e888df..06ce2a217 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs 
@@ -123,7 +123,7 @@ impl ProofExpr for InequalityExpr { builder: &mut VerificationBuilder, accessor: &dyn CommitmentAccessor, ) -> Result { - let one_eval = builder.mle_evaluations.one_evaluation; + let one_eval = builder.mle_evaluations.input_one_evaluation; let lhs_eval = self.lhs.verifier_evaluate(builder, accessor)?; let rhs_eval = self.rhs.verifier_evaluate(builder, accessor)?; let lhs_scale = self.lhs.data_type().scale().unwrap_or(0); diff --git a/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs index fd4b646b9..4ea429cc5 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs @@ -70,7 +70,7 @@ impl ProofExpr for LiteralExpr { builder: &mut VerificationBuilder, _accessor: &dyn CommitmentAccessor, ) -> Result { - let mut commitment = builder.mle_evaluations.one_evaluation; + let mut commitment = builder.mle_evaluations.input_one_evaluation; commitment *= self.value.to_scalar(); Ok(commitment) } diff --git a/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs index 1cdf204f6..c0f2f899f 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs @@ -66,7 +66,7 @@ impl ProofExpr for NotExpr { accessor: &dyn CommitmentAccessor, ) -> Result { let eval = self.expr.verifier_evaluate(builder, accessor)?; - Ok(builder.mle_evaluations.one_evaluation - eval) + Ok(builder.mle_evaluations.input_one_evaluation - eval) } fn get_column_references(&self, columns: &mut IndexSet) { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs index 67dae7b47..9e614d22e 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs @@ -247,7 +247,7 @@ fn verify_bit_decomposition( ) { let mut eval = expr_eval; 
let sign_eval = bit_evals.last().unwrap(); - let sign_eval = builder.mle_evaluations.one_evaluation - C::Scalar::TWO * *sign_eval; + let sign_eval = builder.mle_evaluations.input_one_evaluation - C::Scalar::TWO * *sign_eval; let mut vary_index = 0; eval -= sign_eval * C::Scalar::from(dist.constant_part()); dist.for_each_abs_varying_bit(|int_index: usize, bit_index: usize| { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs index f8177d551..f12900bcf 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs @@ -2,7 +2,7 @@ use super::{count_sign, prover_evaluate_sign, result_evaluate_sign, verifier_eva use crate::{ base::{bit::BitDistribution, polynomial::MultilinearExtension, scalar::Curve25519Scalar}, sql::proof::{ - CountBuilder, Indexes, ProofBuilder, SumcheckMleEvaluations, SumcheckRandomScalars, + CountBuilder, ProofBuilder, SumcheckMleEvaluations, SumcheckRandomScalars, VerificationBuilder, }, }; @@ -58,11 +58,11 @@ fn we_can_verify_a_constant_decomposition() { let sumcheck_random_scalars = SumcheckRandomScalars::new(&scalars, data.len(), 2); let evaluation_point = [Curve25519Scalar::from(324), Curve25519Scalar::from(97)]; let sumcheck_evaluations = SumcheckMleEvaluations::new( + data.len(), data.len(), &evaluation_point, &sumcheck_random_scalars, &[], - &Indexes::default(), ); let one_eval = sumcheck_evaluations.one_evaluation; @@ -82,11 +82,11 @@ fn verification_of_constant_data_fails_if_the_commitment_doesnt_match_the_bit_di let sumcheck_random_scalars = SumcheckRandomScalars::new(&scalars, data.len(), 2); let evaluation_point = [Curve25519Scalar::from(324), Curve25519Scalar::from(97)]; let sumcheck_evaluations = SumcheckMleEvaluations::new( + data.len(), data.len(), &evaluation_point, &sumcheck_random_scalars, &[], - &Indexes::default(), ); let one_eval = sumcheck_evaluations.one_evaluation; diff 
--git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index c1c18c00f..defcd2a36 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -13,7 +13,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, HonestProver, Indexes, ProofBuilder, ProofPlan, ProverEvaluate, + CountBuilder, HonestProver, ProofBuilder, ProofPlan, ProverEvaluate, ProverHonestyMarker, ResultBuilder, SumcheckSubpolynomialType, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, DynProofExpr, ProofExpr, TableExpr}, @@ -101,13 +101,7 @@ where .map(|aliased_expr| aliased_expr.expr.verifier_evaluate(builder, accessor)) .collect::, _>>()?, ); - // 3. indexes - let indexes_eval = builder.mle_evaluations.result_indexes_evaluation.ok_or( - ProofError::VerificationError { - error: "invalid indexes", - }, - )?; - // 4. filtered_columns + // 3. filtered_columns let filtered_columns_evals: Vec<_> = repeat_with(|| builder.consume_intermediate_mle()) .take(self.aliased_results.len()) .collect(); @@ -178,9 +172,7 @@ impl ProverEvaluate for FilterExec { .collect(); // Compute filtered_columns and indexes - let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - // 3. set indexes - builder.set_result_indexes(Indexes::Dense(0..(result_len as u64))); + let (filtered_columns, _) = filter_columns(alloc, &columns, selection); builder.request_post_result_challenges(2); filtered_columns } @@ -206,7 +198,7 @@ impl ProverEvaluate for FilterExec { .iter() .map(|aliased_expr| aliased_expr.expr.prover_evaluate(builder, alloc, accessor)) .collect(); - // Compute filtered_columns and indexes + // Compute filtered_columns let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); // 3. 
Produce MLEs filtered_columns.iter().copied().for_each(|column| { @@ -238,13 +230,8 @@ fn verify_filter( s_eval: C::Scalar, d_evals: &[C::Scalar], ) -> Result<(), ProofError> { - let one_eval = builder.mle_evaluations.one_evaluation; - - let Some(chi_eval) = builder.mle_evaluations.result_indexes_evaluation else { - return Err(ProofError::VerificationError { - error: "Result indexes not valid.", - }); - }; + let one_eval = builder.mle_evaluations.input_one_evaluation; + let chi_eval = builder.mle_evaluations.output_one_evaluation; let c_fold_eval = alpha * one_eval + fold_vals(beta, c_evals); let d_bar_fold_eval = alpha * one_eval + fold_vals(beta, d_evals); diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs index 1f2bb55de..1b7d7ba96 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs @@ -204,7 +204,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + ProvableQueryResult::new(&builder.table_length(), &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -249,7 +249,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + ProvableQueryResult::new(&builder.table_length(), &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -282,7 +282,7 @@ fn we_can_get_no_columns_from_a_basic_filter_with_no_selected_columns_using_resu let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + ProvableQueryResult::new(&builder.table_length(), &result_cols) 
.to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -321,7 +321,7 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + ProvableQueryResult::new(&builder.table_length(), &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index cf2fa969d..0c1a03be3 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -8,7 +8,7 @@ use crate::{ }, sql::{ proof::{ - Indexes, ProofBuilder, ProverEvaluate, ProverHonestyMarker, QueryError, ResultBuilder, + ProofBuilder, ProverEvaluate, ProverHonestyMarker, QueryError, ResultBuilder, VerifiableQueryResult, }, // Making this explicit to ensure that we don't accidentally use the @@ -58,11 +58,9 @@ impl ProverEvaluate for DishonestFilterExec { .result_evaluate(builder.table_length(), alloc, accessor) }) .collect(); - // Compute filtered_columns and indexes - let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); + // Compute filtered_columns + let (filtered_columns, _) = filter_columns(alloc, &columns, selection); let filtered_columns = tamper_column(alloc, filtered_columns); - // 3. 
set indexes - builder.set_result_indexes(Indexes::Dense(0..(result_len as u64))); builder.request_post_result_challenges(2); filtered_columns } @@ -91,7 +89,7 @@ impl ProverEvaluate for DishonestFilterExec { .iter() .map(|aliased_expr| aliased_expr.expr.prover_evaluate(builder, alloc, accessor)) .collect(); - // Compute filtered_columns and indexes + // Compute filtered_columns let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); let filtered_columns = tamper_column(alloc, filtered_columns); // 3. Produce MLEs diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index a5053f203..ba717a131 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -16,7 +16,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, Indexes, ProofBuilder, ProofPlan, ProverEvaluate, ResultBuilder, + CountBuilder, ProofBuilder, ProofPlan, ProverEvaluate, ResultBuilder, SumcheckSubpolynomialType, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, ColumnExpr, DynProofExpr, ProofExpr, TableExpr}, @@ -120,13 +120,7 @@ impl ProofPlan for GroupByExec { .iter() .map(|aliased_expr| aliased_expr.expr.verifier_evaluate(builder, accessor)) .collect::, _>>()?; - // 3. indexes - let indexes_eval = builder.mle_evaluations.result_indexes_evaluation.ok_or( - ProofError::VerificationError { - error: "invalid indexes", - }, - )?; - // 4. filtered_columns + // 3. 
filtered_columns let group_by_result_columns_evals: Vec<_> = repeat_with(|| builder.consume_intermediate_mle()) @@ -242,7 +236,7 @@ impl ProverEvaluate for GroupByExec { .result_evaluate(builder.table_length(), alloc, accessor) }) .collect::>(); - // Compute filtered_columns and indexes + // Compute filtered_columns let AggregatedColumns { group_by_columns: group_by_result_columns, sum_columns: sum_result_columns, @@ -250,8 +244,6 @@ impl ProverEvaluate for GroupByExec { .. } = aggregate_columns(alloc, &group_by_columns, &sum_columns, &[], &[], selection) .expect("columns should be aggregatable"); - // 3. set indexes - builder.set_result_indexes(Indexes::Dense(0..(count_column.len() as u64))); let sum_result_columns_iter = sum_result_columns.iter().map(|col| Column::Scalar(col)); builder.request_post_result_challenges(2); group_by_result_columns @@ -287,7 +279,7 @@ impl ProverEvaluate for GroupByExec { .iter() .map(|aliased_expr| aliased_expr.expr.prover_evaluate(builder, alloc, accessor)) .collect::>(); - // 3. Compute filtered_columns and indexes + // 3. 
Compute filtered_columns let AggregatedColumns { group_by_columns: group_by_result_columns, sum_columns: sum_result_columns, @@ -331,7 +323,7 @@ fn verify_group_by( (g_in_evals, sum_in_evals, sel_in_eval): (Vec, Vec, C::Scalar), (g_out_evals, sum_out_evals, count_out_eval): (Vec, Vec, C::Scalar), ) -> Result<(), ProofError> { - let one_eval = builder.mle_evaluations.one_evaluation; + let one_eval = builder.mle_evaluations.input_one_evaluation; // g_in_fold = alpha + sum beta^j * g_in[j] let g_in_fold_eval = alpha * one_eval + fold_vals(beta, &g_in_evals); diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index b0551969c..0d2de3170 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -10,7 +10,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, Indexes, ProofBuilder, ProofPlan, ProverEvaluate, ResultBuilder, + CountBuilder, ProofBuilder, ProofPlan, ProverEvaluate, ResultBuilder, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, ProofExpr, TableExpr}, @@ -111,7 +111,6 @@ impl ProverEvaluate for ProjectionExec { .result_evaluate(builder.table_length(), alloc, accessor) }) .collect(); - builder.set_result_indexes(Indexes::Dense(0..(builder.table_length() as u64))); columns } From e8d92987659d5b5fe45039069609ed4bcdb95f2d Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 09:46:13 -0400 Subject: [PATCH 05/29] fix: get tests to run Note that I did not force push this time since the previous commit is worth preserving for the sake of rustc debugging see https://github.com/rust-lang/rust/issues/131499 --- crates/proof-of-sql/src/sql/proof/query_proof.rs | 4 ++-- crates/proof-of-sql/src/sql/proof/query_proof_test.rs | 4 ++-- crates/proof-of-sql/src/sql/proof/result_builder.rs | 3 --- crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs 
| 4 ++-- .../proof-of-sql/src/sql/proof_plans/filter_exec_test.rs | 8 ++++---- .../src/sql/proof_plans/projection_exec_test.rs | 6 +++--- 6 files changed, 13 insertions(+), 16 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index 20a56ea27..4a0b9f0c9 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -6,7 +6,7 @@ use crate::{ base::{ bit::BitDistribution, commitment::{Commitment, CommitmentEvaluationProof}, - database::{Column, CommitmentAccessor, DataAccessor}, + database::{CommitmentAccessor, DataAccessor}, math::log2_up, polynomial::{compute_evaluation_vector, CompositePolynomialInfo}, proof::{Keccak256Transcript, ProofError, Transcript}, @@ -56,7 +56,7 @@ impl QueryProof { let mut result_builder = ResultBuilder::new(table_length); let result_cols = expr.result_evaluate(&mut result_builder, &alloc, accessor); let provable_result = - ProvableQueryResult::new(&result_builder.table_length(), &result_cols); + ProvableQueryResult::new(result_builder.table_length() as u64, &result_cols); // construct a transcript for the proof let mut transcript: Keccak256Transcript = diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index fa010e909..53cefa5a2 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -198,7 +198,7 @@ impl Default for SquareTestProofPlan { impl ProverEvaluate for SquareTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + _builder: &mut ResultBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -378,7 +378,7 @@ impl Default for DoubleSquareTestProofPlan { impl ProverEvaluate for DoubleSquareTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + _builder: &mut ResultBuilder, alloc: &'a Bump, 
_accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs b/crates/proof-of-sql/src/sql/proof/result_builder.rs index e1eafa4de..6670b8ae1 100644 --- a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/result_builder.rs @@ -1,5 +1,3 @@ -use super::Indexes; - /// Track the result created by a query pub struct ResultBuilder { table_length: usize, @@ -16,7 +14,6 @@ impl ResultBuilder { pub fn new(table_length: usize) -> Self { Self { table_length, - result_index_vector: Indexes::default(), num_post_result_challenges: 0, } } diff --git a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs index f12900bcf..7e24b66ae 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs @@ -64,7 +64,7 @@ fn we_can_verify_a_constant_decomposition() { &sumcheck_random_scalars, &[], ); - let one_eval = sumcheck_evaluations.one_evaluation; + let one_eval = sumcheck_evaluations.input_one_evaluation; let mut builder: VerificationBuilder = VerificationBuilder::new(0, sumcheck_evaluations, &dists, &[], &[], &[], Vec::new()); @@ -88,7 +88,7 @@ fn verification_of_constant_data_fails_if_the_commitment_doesnt_match_the_bit_di &sumcheck_random_scalars, &[], ); - let one_eval = sumcheck_evaluations.one_evaluation; + let one_eval = sumcheck_evaluations.input_one_evaluation; let mut builder: VerificationBuilder = VerificationBuilder::new(0, sumcheck_evaluations, &dists, &[], &[], &[], Vec::new()); diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs index 1b7d7ba96..3794d4d76 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs @@ -204,7 +204,7 @@ fn 
we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.table_length(), &result_cols) + ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -249,7 +249,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.table_length(), &result_cols) + ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -282,7 +282,7 @@ fn we_can_get_no_columns_from_a_basic_filter_with_no_selected_columns_using_resu let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = - ProvableQueryResult::new(&builder.table_length(), &result_cols) + ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -321,7 +321,7 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.table_length(), &result_cols) + ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs index 604a3ca58..5245ae266 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs @@ -177,7 +177,7 @@ fn we_can_get_an_empty_result_from_a_basic_projection_on_an_empty_table_using_re ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + 
ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -208,7 +208,7 @@ fn we_can_get_no_columns_from_a_basic_projection_with_no_selected_columns_using_ let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -252,7 +252,7 @@ fn we_can_get_the_correct_result_from_a_basic_projection_using_result_evaluate() ), ]; let res: OwnedTable = - ProvableQueryResult::new(&builder.result_index_vector, &result_cols) + ProvableQueryResult::new(builder.table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ From e0e324674858455c9bc8fb5b0bb155098b7893b6 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 10:25:51 -0400 Subject: [PATCH 06/29] fix: fix table length issue --- crates/proof-of-sql/src/sql/proof/provable_query_result.rs | 3 +-- crates/proof-of-sql/src/sql/proof/result_builder.rs | 2 +- crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs | 3 ++- .../src/sql/proof_plans/filter_exec_test_dishonest_prover.rs | 3 ++- crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs | 1 + 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs index 95c002fe1..3bdd5bab9 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct ProvableQueryResult { 
num_columns: u64, - table_length: u64, + pub(crate) table_length: u64, data: Vec, } @@ -118,7 +118,6 @@ impl ProvableQueryResult { } res.push(val); } - if offset != self.data.len() { return Err(QueryError::MiscellaneousEvaluationError); } diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs b/crates/proof-of-sql/src/sql/proof/result_builder.rs index 6670b8ae1..1969e4787 100644 --- a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/result_builder.rs @@ -1,6 +1,6 @@ /// Track the result created by a query pub struct ResultBuilder { - table_length: usize, + pub(crate) table_length: usize, /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index defcd2a36..081edd78c 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -172,7 +172,8 @@ impl ProverEvaluate for FilterExec { .collect(); // Compute filtered_columns and indexes - let (filtered_columns, _) = filter_columns(alloc, &columns, selection); + let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); + builder.table_length = result_len; builder.request_post_result_challenges(2); filtered_columns } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index 0c1a03be3..803b09e55 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -59,7 +59,8 @@ impl ProverEvaluate for DishonestFilterExec { }) .collect(); // Compute filtered_columns - let (filtered_columns, _) = filter_columns(alloc, &columns, selection); + let (filtered_columns, result_len) 
= filter_columns(alloc, &columns, selection); + builder.table_length = result_len; let filtered_columns = tamper_column(alloc, filtered_columns); builder.request_post_result_challenges(2); filtered_columns diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index ba717a131..9da923747 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -245,6 +245,7 @@ impl ProverEvaluate for GroupByExec { } = aggregate_columns(alloc, &group_by_columns, &sum_columns, &[], &[], selection) .expect("columns should be aggregatable"); let sum_result_columns_iter = sum_result_columns.iter().map(|col| Column::Scalar(col)); + builder.table_length = count_column.len(); builder.request_post_result_challenges(2); group_by_result_columns .into_iter() From 7724a7205ebac12d09cd953696b295ea48692d06 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 10:29:07 -0400 Subject: [PATCH 07/29] fix: address review --- crates/proof-of-sql/src/sql/proof/provable_query_result.rs | 7 +++++++ .../proof-of-sql/src/sql/proof/provable_result_column.rs | 3 ++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs index 3bdd5bab9..7651f792d 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs @@ -52,8 +52,15 @@ impl ProvableQueryResult { } /// Form intermediate query result from index rows and result columns + /// # Panics + /// + /// Will panic if `table_length` is somehow larger than the length of some column + /// which should never happen. 
#[must_use] pub fn new<'a, S: Scalar>(table_length: u64, columns: &'a [Column<'a, S>]) -> Self { + assert!(columns + .iter() + .all(|column| table_length <= column.len() as u64)); let mut sz = 0; for col in columns { sz += col.num_bytes(table_length); diff --git a/crates/proof-of-sql/src/sql/proof/provable_result_column.rs b/crates/proof-of-sql/src/sql/proof/provable_result_column.rs index 6309f313a..c804e4738 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_result_column.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_result_column.rs @@ -14,7 +14,8 @@ pub trait ProvableResultColumn { impl<'a, T: ProvableResultElement<'a>> ProvableResultColumn for &[T] { fn num_bytes(&self, length: u64) -> usize { - (0..length as usize).map(|i| self[i].required_bytes()).sum() + assert_eq!(self.len() as u64, length); + self.iter().map(ProvableResultElement::required_bytes).sum() } fn write(&self, out: &mut [u8], length: u64) -> usize { From 70faf499a78bd596c0b1150d6ae800527fe6e573 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:56:28 -0400 Subject: [PATCH 08/29] fix: fix a test --- .../src/sql/proof/provable_query_result_test.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs index 693e7039e..3540de040 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs @@ -57,12 +57,9 @@ fn we_can_evaluate_result_columns_as_mles() { #[test] fn we_can_evaluate_result_columns_with_no_rows() { - let cols: [Column; 1] = [Column::BigInt(&[10, 11, 12])]; + let cols: [Column; 1] = [Column::BigInt(&[0; 0])]; let res = ProvableQueryResult::new(0, &cols); - let evaluation_point = [ - Curve25519Scalar::from(10u64), - Curve25519Scalar::from(100u64), - ]; + let evaluation_point = []; let 
mut evaluation_vec = [Curve25519Scalar::ZERO; 0]; compute_evaluation_vector(&mut evaluation_vec, &evaluation_point); let column_fields = From 30ac6024e06b96a001c25eff67fb5ed4eaf6c711 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 13:26:01 -0400 Subject: [PATCH 09/29] fix: replace <= with == in `ProvableQueryResult::new` sanity check --- crates/proof-of-sql/src/sql/proof/provable_query_result.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs index 7651f792d..9e4e7ba14 100644 --- a/crates/proof-of-sql/src/sql/proof/provable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/provable_query_result.rs @@ -60,7 +60,7 @@ impl ProvableQueryResult { pub fn new<'a, S: Scalar>(table_length: u64, columns: &'a [Column<'a, S>]) -> Self { assert!(columns .iter() - .all(|column| table_length <= column.len() as u64)); + .all(|column| table_length == column.len() as u64)); let mut sz = 0; for col in columns { sz += col.num_bytes(table_length); From 54ed6aa539978cee69a2e13909f3f43fca2fa81d Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 14:12:10 -0400 Subject: [PATCH 10/29] refactor!: add `ResultBuilder::set_table_length` --- crates/proof-of-sql/src/sql/proof/query_proof.rs | 2 +- crates/proof-of-sql/src/sql/proof/result_builder.rs | 13 +++++++++---- .../proof-of-sql/src/sql/proof_plans/filter_exec.rs | 2 +- .../src/sql/proof_plans/filter_exec_test.rs | 8 ++++---- .../filter_exec_test_dishonest_prover.rs | 2 +- .../src/sql/proof_plans/group_by_exec.rs | 2 +- .../src/sql/proof_plans/projection_exec_test.rs | 6 +++--- 7 files changed, 20 insertions(+), 15 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index 4a0b9f0c9..ff78cf253 100644 --- 
a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -53,7 +53,7 @@ impl QueryProof { assert!(num_sumcheck_variables > 0); let alloc = Bump::new(); - let mut result_builder = ResultBuilder::new(table_length); + let mut result_builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut result_builder, &alloc, accessor); let provable_result = ProvableQueryResult::new(result_builder.table_length() as u64, &result_cols); diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs b/crates/proof-of-sql/src/sql/proof/result_builder.rs index 1969e4787..9b17f045f 100644 --- a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/result_builder.rs @@ -1,6 +1,6 @@ /// Track the result created by a query pub struct ResultBuilder { - pub(crate) table_length: usize, + table_length: usize, /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to @@ -11,18 +11,23 @@ pub struct ResultBuilder { impl ResultBuilder { /// Create a new result builder for a table with the given length. For multi table queries, this will likely need to change. - pub fn new(table_length: usize) -> Self { + pub fn new() -> Self { Self { - table_length, + table_length: 0, num_post_result_challenges: 0, } } - /// Get the length of the table + /// Get the length of the output table pub fn table_length(&self) -> usize { self.table_length } + /// Set the length of the output table + pub fn set_table_length(&mut self, table_length: usize) { + self.table_length = table_length; + } + /// The number of challenges used in the proof. 
/// Specifically, these are the challenges that the verifier sends to /// the prover after the prover sends the result, but before the prover diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index 081edd78c..ca89e45f0 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -173,7 +173,7 @@ impl ProverEvaluate for FilterExec { // Compute filtered_columns and indexes let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - builder.table_length = result_len; + builder.set_table_length(result_len); builder.request_post_result_challenges(2); filtered_columns } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs index 3794d4d76..a69b826e6 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs @@ -192,7 +192,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result where_clause, ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(0); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), @@ -237,7 +237,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { where_clause, ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(5); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), @@ -278,7 +278,7 @@ fn we_can_get_no_columns_from_a_basic_filter_with_no_selected_columns_using_resu equal(column(t, "a", &accessor), const_int128(5)); let expr = filter(cols_expr_plan(t, &[], &accessor), 
tab(t), where_clause); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(5); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = @@ -309,7 +309,7 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { where_clause, ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(5); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index 803b09e55..dc13ef03e 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -60,7 +60,7 @@ impl ProverEvaluate for DishonestFilterExec { .collect(); // Compute filtered_columns let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - builder.table_length = result_len; + builder.set_table_length(result_len); let filtered_columns = tamper_column(alloc, filtered_columns); builder.request_post_result_challenges(2); filtered_columns diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index 9da923747..0f7ddc535 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -245,7 +245,7 @@ impl ProverEvaluate for GroupByExec { } = aggregate_columns(alloc, &group_by_columns, &sum_columns, &[], &[], selection) .expect("columns should be aggregatable"); let sum_result_columns_iter = sum_result_columns.iter().map(|col| Column::Scalar(col)); - builder.table_length = count_column.len(); + 
builder.set_table_length(count_column.len()); builder.request_post_result_challenges(2); group_by_result_columns .into_iter() diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs index 5245ae266..3198f714c 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs @@ -165,7 +165,7 @@ fn we_can_get_an_empty_result_from_a_basic_projection_on_an_empty_table_using_re let expr: DynProofPlan = projection(cols_expr_plan(t, &["b", "c", "d", "e"], &accessor), tab(t)); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(0); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), @@ -204,7 +204,7 @@ fn we_can_get_no_columns_from_a_basic_projection_with_no_selected_columns_using_ accessor.add_table(t, data, 0); let expr: DynProofPlan = projection(cols_expr_plan(t, &[], &accessor), tab(t)); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(5); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = @@ -240,7 +240,7 @@ fn we_can_get_the_correct_result_from_a_basic_projection_using_result_evaluate() tab(t), ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(5); + let mut builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), From 291a53f93bf4c70fc5b650650ef4b7da743c7ce3 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 16:47:29 -0400 Subject: [PATCH 11/29] fix!: address reviews --- crates/proof-of-sql/src/sql/proof/query_proof.rs | 2 +- 
.../proof-of-sql/src/sql/proof/query_proof_test.rs | 11 ++++++++--- crates/proof-of-sql/src/sql/proof/result_builder.rs | 12 ++++++------ .../src/sql/proof/verifiable_query_result_test.rs | 3 ++- .../proof-of-sql/src/sql/proof_plans/filter_exec.rs | 7 ++++--- .../src/sql/proof_plans/filter_exec_test.rs | 8 ++++---- .../proof_plans/filter_exec_test_dishonest_prover.rs | 7 ++++--- .../src/sql/proof_plans/group_by_exec.rs | 9 +++++---- .../src/sql/proof_plans/projection_exec.rs | 5 ++++- .../src/sql/proof_plans/projection_exec_test.rs | 6 +++--- 10 files changed, 41 insertions(+), 29 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index ff78cf253..afa488d7c 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -56,7 +56,7 @@ impl QueryProof { let mut result_builder = ResultBuilder::new(); let result_cols = expr.result_evaluate(&mut result_builder, &alloc, accessor); let provable_result = - ProvableQueryResult::new(result_builder.table_length() as u64, &result_cols); + ProvableQueryResult::new(result_builder.result_table_length() as u64, &result_cols); // construct a transcript for the proof let mut transcript: Keccak256Transcript = diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 53cefa5a2..3817c3e19 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -47,7 +47,9 @@ impl ProverEvaluate for TrivialTestProofPlan { alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { - let col = alloc.alloc_slice_fill_copy(builder.table_length(), self.column_fill_value); + let input_length = self.length; + let col = alloc.alloc_slice_fill_copy(input_length, self.column_fill_value); + builder.set_result_table_length(input_length); vec![Column::BigInt(col)] } @@ -198,11 +200,12 @@ 
impl Default for SquareTestProofPlan { impl ProverEvaluate for SquareTestProofPlan { fn result_evaluate<'a>( &self, - _builder: &mut ResultBuilder, + builder: &mut ResultBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { let res: &[_] = alloc.alloc_slice_copy(&self.res); + builder.set_result_table_length(2); vec![Column::BigInt(res)] } @@ -378,11 +381,12 @@ impl Default for DoubleSquareTestProofPlan { impl ProverEvaluate for DoubleSquareTestProofPlan { fn result_evaluate<'a>( &self, - _builder: &mut ResultBuilder, + builder: &mut ResultBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { let res: &[_] = alloc.alloc_slice_copy(&self.res); + builder.set_result_table_length(2); vec![Column::BigInt(res)] } @@ -593,6 +597,7 @@ impl ProverEvaluate for ChallengeTestProofPlan { _accessor: &'a dyn DataAccessor, ) -> Vec> { builder.request_post_result_challenges(2); + builder.set_result_table_length(2); vec![Column::BigInt(&[9, 25])] } diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs b/crates/proof-of-sql/src/sql/proof/result_builder.rs index 9b17f045f..34c19438e 100644 --- a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/result_builder.rs @@ -1,6 +1,6 @@ /// Track the result created by a query pub struct ResultBuilder { - table_length: usize, + result_table_length: usize, /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to @@ -13,19 +13,19 @@ impl ResultBuilder { /// Create a new result builder for a table with the given length. For multi table queries, this will likely need to change. 
pub fn new() -> Self { Self { - table_length: 0, + result_table_length: 0, num_post_result_challenges: 0, } } /// Get the length of the output table - pub fn table_length(&self) -> usize { - self.table_length + pub fn result_table_length(&self) -> usize { + self.result_table_length } /// Set the length of the output table - pub fn set_table_length(&mut self, table_length: usize) { - self.table_length = table_length; + pub fn set_result_table_length(&mut self, result_table_length: usize) { + self.result_table_length = result_table_length; } /// The number of challenges used in the proof. diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs index 02efaf453..505984fb8 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs @@ -27,12 +27,13 @@ pub(super) struct EmptyTestQueryExpr { impl ProverEvaluate for EmptyTestQueryExpr { fn result_evaluate<'a>( &self, - _builder: &mut ResultBuilder, + builder: &mut ResultBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { let zeros = vec![0; self.length]; let res: &[_] = alloc.alloc_slice_copy(&zeros); + builder.set_result_table_length(self.length); vec![Column::BigInt(res); self.columns] } fn prover_evaluate<'a>( diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index ca89e45f0..2408a8303 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -152,10 +152,11 @@ impl ProverEvaluate for FilterExec { alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { + let input_length = accessor.get_length(self.table.table_ref); // 1. 
selection let selection_column: Column<'a, C::Scalar> = self.where_clause - .result_evaluate(builder.table_length(), alloc, accessor); + .result_evaluate(input_length, alloc, accessor); let selection = selection_column .as_boolean() .expect("selection is not boolean"); @@ -167,13 +168,13 @@ impl ProverEvaluate for FilterExec { .map(|aliased_expr| { aliased_expr .expr - .result_evaluate(builder.table_length(), alloc, accessor) + .result_evaluate(input_length, alloc, accessor) }) .collect(); // Compute filtered_columns and indexes let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - builder.set_table_length(result_len); + builder.set_result_table_length(result_len); builder.request_post_result_challenges(2); filtered_columns } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs index a69b826e6..f9a9363c3 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs @@ -204,7 +204,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -249,7 +249,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -282,7 +282,7 @@ fn we_can_get_no_columns_from_a_basic_filter_with_no_selected_columns_using_resu let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = - 
ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -321,7 +321,7 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index dc13ef03e..efcb0695c 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -41,10 +41,11 @@ impl ProverEvaluate for DishonestFilterExec { alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { + let input_length = accessor.get_length(self.table.table_ref); // 1. 
selection let selection_column: Column<'a, Curve25519Scalar> = self.where_clause - .result_evaluate(builder.table_length(), alloc, accessor); + .result_evaluate(input_length, alloc, accessor); let selection = selection_column .as_boolean() .expect("selection is not boolean"); @@ -55,12 +56,12 @@ impl ProverEvaluate for DishonestFilterExec { .map(|aliased_expr| { aliased_expr .expr - .result_evaluate(builder.table_length(), alloc, accessor) + .result_evaluate(input_length, alloc, accessor) }) .collect(); // Compute filtered_columns let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - builder.set_table_length(result_len); + builder.set_result_table_length(result_len); let filtered_columns = tamper_column(alloc, filtered_columns); builder.request_post_result_challenges(2); filtered_columns diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index 0f7ddc535..35d324516 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -212,10 +212,11 @@ impl ProverEvaluate for GroupByExec { alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { + let input_length = accessor.get_length(self.table.table_ref); // 1. 
selection let selection_column: Column<'a, C::Scalar> = self.where_clause - .result_evaluate(builder.table_length(), alloc, accessor); + .result_evaluate(input_length, alloc, accessor); let selection = selection_column .as_boolean() @@ -225,7 +226,7 @@ impl ProverEvaluate for GroupByExec { let group_by_columns = self .group_by_exprs .iter() - .map(|expr| expr.result_evaluate(builder.table_length(), alloc, accessor)) + .map(|expr| expr.result_evaluate(input_length, alloc, accessor)) .collect::>(); let sum_columns = self .sum_expr @@ -233,7 +234,7 @@ impl ProverEvaluate for GroupByExec { .map(|aliased_expr| { aliased_expr .expr - .result_evaluate(builder.table_length(), alloc, accessor) + .result_evaluate(input_length, alloc, accessor) }) .collect::>(); // Compute filtered_columns @@ -245,7 +246,7 @@ impl ProverEvaluate for GroupByExec { } = aggregate_columns(alloc, &group_by_columns, &sum_columns, &[], &[], selection) .expect("columns should be aggregatable"); let sum_result_columns_iter = sum_result_columns.iter().map(|col| Column::Scalar(col)); - builder.set_table_length(count_column.len()); + builder.set_result_table_length(count_column.len()); builder.request_post_result_challenges(2); group_by_result_columns .into_iter() diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index 0d2de3170..001e4b9f1 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -102,15 +102,18 @@ impl ProverEvaluate for ProjectionExec { alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { + let input_length = accessor.get_length(self.table.table_ref); let columns: Vec<_> = self .aliased_results .iter() .map(|aliased_expr| { aliased_expr .expr - .result_evaluate(builder.table_length(), alloc, accessor) + .result_evaluate(input_length, alloc, accessor) }) .collect(); + // For projection, the result table length is the 
same as the input table length + builder.set_result_table_length(input_length); columns } diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs index 3198f714c..53914c11c 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs @@ -177,7 +177,7 @@ fn we_can_get_an_empty_result_from_a_basic_projection_on_an_empty_table_using_re ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -208,7 +208,7 @@ fn we_can_get_no_columns_from_a_basic_projection_with_no_selected_columns_using_ let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = - ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -252,7 +252,7 @@ fn we_can_get_the_correct_result_from_a_basic_projection_using_result_evaluate() ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.table_length() as u64, &result_cols) + ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ From 5fd0ceb6e9e996c7e8cbafd2bb88da9f769eb532 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 16:55:09 -0400 Subject: [PATCH 12/29] fix: happy clippy (new_without_default) --- crates/proof-of-sql/src/sql/proof/result_builder.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs 
b/crates/proof-of-sql/src/sql/proof/result_builder.rs index 34c19438e..4f296c46f 100644 --- a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/result_builder.rs @@ -9,6 +9,12 @@ pub struct ResultBuilder { num_post_result_challenges: usize, } +impl Default for ResultBuilder { + fn default() -> Self { + Self::new() + } +} + impl ResultBuilder { /// Create a new result builder for a table with the given length. For multi table queries, this will likely need to change. pub fn new() -> Self { From 1e33fcc00d5b9f577955e3a1e96a270d1a3c91a6 Mon Sep 17 00:00:00 2001 From: Abinand P Date: Fri, 11 Oct 2024 11:03:23 +0530 Subject: [PATCH 13/29] feat: updated the examples to use the `DynamicDoryEvaluationProof` (#253) # Rationale for this change Our example code leverages the `InnerProductProof` commitment scheme. While this is still valid, it is more difficult as an entry point to the code because it requires Linux (and GPU by default). # What changes are included in this PR? Modified the examples completely to depend and run on `DynamicDoryEvaluationProof` . # Are these changes tested? 
Yes, the existing test are enough with the same values --------- Signed-off-by: Abinand P --- crates/proof-of-sql/Cargo.toml | 4 +-- .../proof-of-sql/examples/hello_world/main.rs | 27 ++++++++++++---- crates/proof-of-sql/examples/posql_db/main.rs | 32 ++++++++++++------- .../examples/posql_db/run_example.sh | 8 ++--- 4 files changed, 47 insertions(+), 24 deletions(-) diff --git a/crates/proof-of-sql/Cargo.toml b/crates/proof-of-sql/Cargo.toml index f6d1e5804..4cbdd894e 100644 --- a/crates/proof-of-sql/Cargo.toml +++ b/crates/proof-of-sql/Cargo.toml @@ -83,11 +83,11 @@ workspace = true [[example]] name = "hello_world" -required-features = [ "blitzar", "test" ] +required-features = ["test"] [[example]] name = "posql_db" -required-features = [ "arrow", "blitzar" ] +required-features = [ "arrow" ] [[bench]] name = "posql_benches" diff --git a/crates/proof-of-sql/examples/hello_world/main.rs b/crates/proof-of-sql/examples/hello_world/main.rs index 26d025707..d8bf9633e 100644 --- a/crates/proof-of-sql/examples/hello_world/main.rs +++ b/crates/proof-of-sql/examples/hello_world/main.rs @@ -1,11 +1,14 @@ #![doc = include_str!("README.md")] - -use blitzar::{compute::init_backend, proof::InnerProductProof}; +use ark_std::test_rng; +use blitzar::compute::init_backend; use proof_of_sql::{ base::database::{ owned_table_utility::{bigint, owned_table, varchar}, OwnedTableTestAccessor, TestAccessor, }, + proof_primitive::dory::{ + DynamicDoryEvaluationProof, ProverSetup, PublicParameters, VerifierSetup, + }, sql::{parse::QueryExpr, proof::QueryProof}, }; use std::{ @@ -42,7 +45,11 @@ fn main() { init_backend(); end_timer(timer); let timer = start_timer("Loading data"); - let mut accessor = OwnedTableTestAccessor::::new_empty_with_setup(()); + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let prover_setup = ProverSetup::from(&public_parameters); + let verifier_setup = VerifierSetup::from(&public_parameters); + let mut accessor = + 
OwnedTableTestAccessor::::new_empty_with_setup(&prover_setup); accessor.add_table( "sxt.table".parse().unwrap(), owned_table([ @@ -61,11 +68,19 @@ fn main() { .unwrap(); end_timer(timer); let timer = start_timer("Generating Proof"); - let (proof, serialized_result) = - QueryProof::::new(query.proof_expr(), &accessor, &()); + let (proof, serialized_result) = QueryProof::::new( + query.proof_expr(), + &accessor, + &&prover_setup, + ); end_timer(timer); let timer = start_timer("Verifying Proof"); - let result = proof.verify(query.proof_expr(), &accessor, &serialized_result, &()); + let result = proof.verify( + query.proof_expr(), + &accessor, + &serialized_result, + &&verifier_setup, + ); end_timer(timer); match result { Ok(result) => { diff --git a/crates/proof-of-sql/examples/posql_db/main.rs b/crates/proof-of-sql/examples/posql_db/main.rs index 9ec7f2bc0..a9138ae11 100644 --- a/crates/proof-of-sql/examples/posql_db/main.rs +++ b/crates/proof-of-sql/examples/posql_db/main.rs @@ -9,17 +9,19 @@ use arrow::{ datatypes::{DataType, Field, Schema}, record_batch::RecordBatch, }; -use blitzar::proof::InnerProductProof; use clap::{arg, Parser, Subcommand, ValueEnum}; use commit_accessor::CommitAccessor; use csv_accessor::{read_record_batch_from_csv, CsvDataAccessor}; -use curve25519_dalek::RistrettoPoint; use itertools::Itertools; use proof_of_sql::{ base::{ commitment::TableCommitment, database::{SchemaAccessor, TableRef}, }, + proof_primitive::dory::{ + DynamicDoryCommitment, DynamicDoryEvaluationProof, ProverSetup, PublicParameters, + VerifierSetup, + }, sql::{parse::QueryExpr, proof::VerifiableQueryResult}, }; use proof_of_sql_parser::{Identifier, SelectStatement}; @@ -149,6 +151,11 @@ fn main() { println!("Warming up GPU..."); blitzar::compute::init_backend(); println!("Done."); + + let mut rng = ::from_seed([0u8; 32]); + let public_parameters = PublicParameters::rand(5, &mut rng); + let prover_setup = ProverSetup::from(&public_parameters); + let verifier_setup = 
VerifierSetup::from(&public_parameters); match args.command { Commands::Create { table, @@ -156,7 +163,7 @@ fn main() { data_types, } => { let commit_accessor = - CommitAccessor::::new(PathBuf::from(args.path.clone())); + CommitAccessor::::new(PathBuf::from(args.path.clone())); let csv_accessor = CsvDataAccessor::new(PathBuf::from(args.path)); let schema = Schema::new( columns @@ -166,7 +173,7 @@ fn main() { .collect::>(), ); let batch = RecordBatch::new_empty(Arc::new(schema)); - let table_commitment = TableCommitment::try_from_record_batch(&batch, &()) + let table_commitment = TableCommitment::try_from_record_batch(&batch, &&prover_setup) .expect("Failed to create table commitment."); commit_accessor .write_commit(&table, &table_commitment) @@ -180,7 +187,7 @@ fn main() { file: file_path, } => { let mut commit_accessor = - CommitAccessor::::new(PathBuf::from(args.path.clone())); + CommitAccessor::::new(PathBuf::from(args.path.clone())); let csv_accessor = CsvDataAccessor::new(PathBuf::from(args.path)); commit_accessor .load_commit(table_name) @@ -200,7 +207,7 @@ fn main() { .expect("Failed to write batch"); let timer = start_timer("Updating Commitment"); table_commitment - .try_append_record_batch(&append_batch, &()) + .try_append_record_batch(&append_batch, &&prover_setup) .expect("Failed to append batch"); end_timer(timer); commit_accessor @@ -209,7 +216,7 @@ fn main() { } Commands::Prove { query, file } => { let mut commit_accessor = - CommitAccessor::::new(PathBuf::from(args.path.clone())); + CommitAccessor::::new(PathBuf::from(args.path.clone())); let mut csv_accessor = CsvDataAccessor::new(PathBuf::from(args.path.clone())); let tables = query.get_table_references("example".parse().unwrap()); for table in tables.into_iter().map(TableRef::new) { @@ -230,10 +237,10 @@ fn main() { let query = QueryExpr::try_new(query, "example".parse().unwrap(), &commit_accessor).unwrap(); let timer = start_timer("Generating Proof"); - let proof = VerifiableQueryResult::::new( 
+ let proof = VerifiableQueryResult::::new( query.proof_expr(), &csv_accessor, - &(), + &&prover_setup, ); end_timer(timer); fs::write( @@ -244,7 +251,7 @@ fn main() { } Commands::Verify { query, file } => { let mut commit_accessor = - CommitAccessor::::new(PathBuf::from(args.path.clone())); + CommitAccessor::::new(PathBuf::from(args.path.clone())); let table_refs = query.get_table_references("example".parse().unwrap()); for table_ref in table_refs { let table_name = TableRef::new(table_ref); @@ -254,12 +261,13 @@ fn main() { } let query = QueryExpr::try_new(query, "example".parse().unwrap(), &commit_accessor).unwrap(); - let result: VerifiableQueryResult = + let result: VerifiableQueryResult = postcard::from_bytes(&fs::read(file).expect("Failed to read proof")) .expect("Failed to deserialize proof"); + let timer = start_timer("Verifying Proof"); let query_result = result - .verify(query.proof_expr(), &commit_accessor, &()) + .verify(query.proof_expr(), &commit_accessor, &&verifier_setup) .expect("Failed to verify proof"); end_timer(timer); println!( diff --git a/crates/proof-of-sql/examples/posql_db/run_example.sh b/crates/proof-of-sql/examples/posql_db/run_example.sh index 91f773fe4..133aea220 100644 --- a/crates/proof-of-sql/examples/posql_db/run_example.sh +++ b/crates/proof-of-sql/examples/posql_db/run_example.sh @@ -1,5 +1,5 @@ cd crates/proof-of-sql/examples/posql_db -cargo run --features="arrow blitzar" --example posql_db create -t sxt.table -c a,b -d BIGINT,VARCHAR -cargo run --features="arrow blitzar" --example posql_db append -t sxt.table -f hello_world.csv -cargo run --features="arrow blitzar" --example posql_db prove -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof -cargo run --features="arrow blitzar" --example posql_db verify -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof \ No newline at end of file +cargo run --features="arrow " --example posql_db create -t sxt.table -c a,b -d BIGINT,VARCHAR +cargo run --features="arrow " --example 
posql_db append -t sxt.table -f hello_world.csv +cargo run --features="arrow " --example posql_db prove -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof +cargo run --features="arrow " --example posql_db verify -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof \ No newline at end of file From caf563cb96ba46489fe1c617ee348d731bf394b7 Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 23:16:33 -0400 Subject: [PATCH 14/29] refactor!: rename `ResultBuilder` to `FirstRoundBuilder` - rename `ResultBuilder` to `FirstRoundBuilder` - rename `proof-of-sql/src/sql/proof/result_builder.rs` to `first_round_builder.rs` --- .../{result_builder.rs => first_round_builder.rs} | 6 +++--- crates/proof-of-sql/src/sql/proof/mod.rs | 4 ++-- crates/proof-of-sql/src/sql/proof/proof_plan.rs | 6 +++--- crates/proof-of-sql/src/sql/proof/query_proof.rs | 4 ++-- .../proof-of-sql/src/sql/proof/query_proof_test.rs | 10 +++++----- .../src/sql/proof/verifiable_query_result_test.rs | 4 ++-- .../proof-of-sql/src/sql/proof_exprs/column_expr.rs | 2 +- .../src/sql/proof_plans/dyn_proof_plan.rs | 2 +- .../proof-of-sql/src/sql/proof_plans/filter_exec.rs | 6 +++--- .../src/sql/proof_plans/filter_exec_test.rs | 12 ++++++------ .../proof_plans/filter_exec_test_dishonest_prover.rs | 4 ++-- .../src/sql/proof_plans/group_by_exec.rs | 4 ++-- .../src/sql/proof_plans/projection_exec.rs | 4 ++-- .../src/sql/proof_plans/projection_exec_test.rs | 10 +++++----- 14 files changed, 39 insertions(+), 39 deletions(-) rename crates/proof-of-sql/src/sql/proof/{result_builder.rs => first_round_builder.rs} (95%) diff --git a/crates/proof-of-sql/src/sql/proof/result_builder.rs b/crates/proof-of-sql/src/sql/proof/first_round_builder.rs similarity index 95% rename from crates/proof-of-sql/src/sql/proof/result_builder.rs rename to crates/proof-of-sql/src/sql/proof/first_round_builder.rs index 4f296c46f..3d2e1df8f 100644 --- 
a/crates/proof-of-sql/src/sql/proof/result_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/first_round_builder.rs @@ -1,5 +1,5 @@ /// Track the result created by a query -pub struct ResultBuilder { +pub struct FirstRoundBuilder { result_table_length: usize, /// The number of challenges used in the proof. @@ -9,13 +9,13 @@ pub struct ResultBuilder { num_post_result_challenges: usize, } -impl Default for ResultBuilder { +impl Default for FirstRoundBuilder { fn default() -> Self { Self::new() } } -impl ResultBuilder { +impl FirstRoundBuilder { /// Create a new result builder for a table with the given length. For multi table queries, this will likely need to change. pub fn new() -> Self { Self { diff --git a/crates/proof-of-sql/src/sql/proof/mod.rs b/crates/proof-of-sql/src/sql/proof/mod.rs index c3082b476..10de7a523 100644 --- a/crates/proof-of-sql/src/sql/proof/mod.rs +++ b/crates/proof-of-sql/src/sql/proof/mod.rs @@ -68,5 +68,5 @@ pub(crate) use result_element_serialization::{ decode_and_convert, decode_multiple_elements, ProvableResultElement, }; -mod result_builder; -pub(crate) use result_builder::ResultBuilder; +mod first_round_builder; +pub(crate) use first_round_builder::FirstRoundBuilder; diff --git a/crates/proof-of-sql/src/sql/proof/proof_plan.rs b/crates/proof-of-sql/src/sql/proof/proof_plan.rs index f500d8b0a..841aadbbb 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof/proof_plan.rs @@ -1,4 +1,4 @@ -use super::{CountBuilder, ProofBuilder, ResultBuilder, VerificationBuilder}; +use super::{CountBuilder, FirstRoundBuilder, ProofBuilder, VerificationBuilder}; use crate::base::{ commitment::Commitment, database::{ @@ -49,10 +49,10 @@ pub trait ProofPlan: Debug + Send + Sync + ProverEvaluate { - /// Evaluate the query and modify `ResultBuilder` to track the result of the query. + /// Evaluate the query and modify `FirstRoundBuilder` to track the result of the query. 
fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec>; diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index afa488d7c..0ca7c7afe 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -12,7 +12,7 @@ use crate::{ proof::{Keccak256Transcript, ProofError, Transcript}, }, proof_primitive::sumcheck::SumcheckProof, - sql::proof::{QueryData, ResultBuilder}, + sql::proof::{FirstRoundBuilder, QueryData}, }; use alloc::{vec, vec::Vec}; use bumpalo::Bump; @@ -53,7 +53,7 @@ impl QueryProof { assert!(num_sumcheck_variables > 0); let alloc = Bump::new(); - let mut result_builder = ResultBuilder::new(); + let mut result_builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut result_builder, &alloc, accessor); let provable_result = ProvableQueryResult::new(result_builder.result_table_length() as u64, &result_cols); diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 3817c3e19..9c6ab7243 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -14,7 +14,7 @@ use crate::{ proof::ProofError, scalar::{Curve25519Scalar, Scalar}, }, - sql::proof::{QueryData, ResultBuilder, SumcheckSubpolynomialType}, + sql::proof::{FirstRoundBuilder, QueryData, SumcheckSubpolynomialType}, }; use bumpalo::Bump; use serde::Serialize; @@ -43,7 +43,7 @@ impl Default for TrivialTestProofPlan { impl ProverEvaluate for TrivialTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -200,7 +200,7 @@ impl Default for SquareTestProofPlan { impl ProverEvaluate for SquareTestProofPlan { fn 
result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -381,7 +381,7 @@ impl Default for DoubleSquareTestProofPlan { impl ProverEvaluate for DoubleSquareTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -592,7 +592,7 @@ struct ChallengeTestProofPlan {} impl ProverEvaluate for ChallengeTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, _alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs index 505984fb8..79cd74431 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs @@ -14,7 +14,7 @@ use crate::{ proof::ProofError, scalar::Scalar, }, - sql::proof::{ProvableQueryResult, QueryData, ResultBuilder}, + sql::proof::{FirstRoundBuilder, ProvableQueryResult, QueryData}, }; use bumpalo::Bump; use serde::Serialize; @@ -27,7 +27,7 @@ pub(super) struct EmptyTestQueryExpr { impl ProverEvaluate for EmptyTestQueryExpr { fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs index ba078a100..2a403b3d6 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs @@ -59,7 +59,7 @@ impl ProofExpr for ColumnExpr { } /// Evaluate the column expression and - /// add the result to the [`ResultBuilder`](crate::sql::proof::ResultBuilder) + /// add the result to 
the [`FirstRoundBuilder`](crate::sql::proof::FirstRoundBuilder) fn result_evaluate<'a>( &self, table_length: usize, diff --git a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs index 2ee976dd2..92d99ddf9 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs @@ -95,7 +95,7 @@ impl ProverEvaluate for DynProofPlan { #[tracing::instrument(name = "DynProofPlan::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut crate::sql::proof::ResultBuilder, + builder: &mut crate::sql::proof::FirstRoundBuilder, alloc: &'a bumpalo::Bump, accessor: &'a dyn crate::base::database::DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index 2408a8303..dedd36db2 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -13,8 +13,8 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, HonestProver, ProofBuilder, ProofPlan, ProverEvaluate, - ProverHonestyMarker, ResultBuilder, SumcheckSubpolynomialType, VerificationBuilder, + CountBuilder, FirstRoundBuilder, HonestProver, ProofBuilder, ProofPlan, ProverEvaluate, + ProverHonestyMarker, SumcheckSubpolynomialType, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, DynProofExpr, ProofExpr, TableExpr}, }, @@ -148,7 +148,7 @@ impl ProverEvaluate for FilterExec { #[tracing::instrument(name = "FilterExec::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs index f9a9363c3..117d52f89 100644 --- 
a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs @@ -11,8 +11,8 @@ use crate::{ }, sql::{ proof::{ - exercise_verification, ProofPlan, ProvableQueryResult, ProverEvaluate, ResultBuilder, - VerifiableQueryResult, + exercise_verification, FirstRoundBuilder, ProofPlan, ProvableQueryResult, + ProverEvaluate, VerifiableQueryResult, }, proof_exprs::{test_utility::*, ColumnExpr, DynProofExpr, LiteralExpr, TableExpr}, }, @@ -192,7 +192,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result where_clause, ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), @@ -237,7 +237,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { where_clause, ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), @@ -278,7 +278,7 @@ fn we_can_get_no_columns_from_a_basic_filter_with_no_selected_columns_using_resu equal(column(t, "a", &accessor), const_int128(5)); let expr = filter(cols_expr_plan(t, &[], &accessor), tab(t), where_clause); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = @@ -309,7 +309,7 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { where_clause, ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, 
&accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index efcb0695c..a354c37b7 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -8,7 +8,7 @@ use crate::{ }, sql::{ proof::{ - ProofBuilder, ProverEvaluate, ProverHonestyMarker, QueryError, ResultBuilder, + FirstRoundBuilder, ProofBuilder, ProverEvaluate, ProverHonestyMarker, QueryError, VerifiableQueryResult, }, // Making this explicit to ensure that we don't accidentally use the @@ -37,7 +37,7 @@ impl ProverEvaluate for DishonestFilterExec { )] fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index 35d324516..a19000ad1 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -16,7 +16,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, ProofBuilder, ProofPlan, ProverEvaluate, ResultBuilder, + CountBuilder, FirstRoundBuilder, ProofBuilder, ProofPlan, ProverEvaluate, SumcheckSubpolynomialType, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, ColumnExpr, DynProofExpr, ProofExpr, TableExpr}, @@ -208,7 +208,7 @@ impl ProverEvaluate for GroupByExec { #[tracing::instrument(name = "GroupByExec::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs 
b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index 001e4b9f1..6d28a154b 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -10,7 +10,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, ProofBuilder, ProofPlan, ProverEvaluate, ResultBuilder, + CountBuilder, FirstRoundBuilder, ProofBuilder, ProofPlan, ProverEvaluate, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, ProofExpr, TableExpr}, @@ -98,7 +98,7 @@ impl ProverEvaluate for ProjectionExec { #[tracing::instrument(name = "ProjectionExec::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut ResultBuilder, + builder: &mut FirstRoundBuilder, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs index 53914c11c..7e649a12d 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs @@ -11,8 +11,8 @@ use crate::{ }, sql::{ proof::{ - exercise_verification, ProofPlan, ProvableQueryResult, ProverEvaluate, ResultBuilder, - VerifiableQueryResult, + exercise_verification, FirstRoundBuilder, ProofPlan, ProvableQueryResult, + ProverEvaluate, VerifiableQueryResult, }, proof_exprs::{test_utility::*, ColumnExpr, DynProofExpr, TableExpr}, }, @@ -165,7 +165,7 @@ fn we_can_get_an_empty_result_from_a_basic_projection_on_an_empty_table_using_re let expr: DynProofPlan = projection(cols_expr_plan(t, &["b", "c", "d", "e"], &accessor), tab(t)); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), @@ -204,7 +204,7 @@ fn 
we_can_get_no_columns_from_a_basic_projection_with_no_selected_columns_using_ accessor.add_table(t, data, 0); let expr: DynProofPlan = projection(cols_expr_plan(t, &[], &accessor), tab(t)); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[]; let res: OwnedTable = @@ -240,7 +240,7 @@ fn we_can_get_the_correct_result_from_a_basic_projection_using_result_evaluate() tab(t), ); let alloc = Bump::new(); - let mut builder = ResultBuilder::new(); + let mut builder = FirstRoundBuilder::new(); let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), From 34454651797a844446c55576b76111448aef29eb Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Thu, 10 Oct 2024 23:25:04 -0400 Subject: [PATCH 15/29] refactor!: rename `ProofBuilder` to `FinalRoundBuilder` - rename `ProofBuilder` to `FinalRoundBuilder` - rename `proof-of-sql/src/sql/proof/proof_builder.rs` to `final_round_builder.rs` --- .../{proof_builder.rs => final_round_builder.rs} | 16 ++++++++++------ ...ilder_test.rs => final_round_builder_test.rs} | 14 +++++++------- crates/proof-of-sql/src/sql/proof/mod.rs | 6 +++--- crates/proof-of-sql/src/sql/proof/proof_plan.rs | 6 +++--- crates/proof-of-sql/src/sql/proof/query_proof.rs | 4 ++-- .../src/sql/proof/query_proof_test.rs | 10 +++++----- .../sql/proof/verifiable_query_result_test.rs | 4 ++-- .../src/sql/proof_exprs/add_subtract_expr.rs | 4 ++-- .../src/sql/proof_exprs/aggregate_expr.rs | 4 ++-- .../proof-of-sql/src/sql/proof_exprs/and_expr.rs | 4 ++-- .../src/sql/proof_exprs/column_expr.rs | 4 ++-- .../src/sql/proof_exprs/dyn_proof_expr.rs | 4 ++-- .../src/sql/proof_exprs/equals_expr.rs | 6 +++--- .../src/sql/proof_exprs/inequality_expr.rs | 4 ++-- .../src/sql/proof_exprs/literal_expr.rs | 4 
++-- .../src/sql/proof_exprs/multiply_expr.rs | 4 ++-- .../proof-of-sql/src/sql/proof_exprs/not_expr.rs | 4 ++-- .../proof-of-sql/src/sql/proof_exprs/or_expr.rs | 6 +++--- .../src/sql/proof_exprs/proof_expr.rs | 4 ++-- .../src/sql/proof_exprs/sign_expr.rs | 11 +++++++---- .../src/sql/proof_exprs/sign_expr_test.rs | 6 +++--- .../src/sql/proof_plans/dyn_proof_plan.rs | 2 +- .../src/sql/proof_plans/filter_exec.rs | 8 ++++---- .../filter_exec_test_dishonest_prover.rs | 4 ++-- .../src/sql/proof_plans/group_by_exec.rs | 6 +++--- .../src/sql/proof_plans/projection_exec.rs | 4 ++-- 26 files changed, 80 insertions(+), 73 deletions(-) rename crates/proof-of-sql/src/sql/proof/{proof_builder.rs => final_round_builder.rs} (93%) rename crates/proof-of-sql/src/sql/proof/{proof_builder_test.rs => final_round_builder_test.rs} (92%) diff --git a/crates/proof-of-sql/src/sql/proof/proof_builder.rs b/crates/proof-of-sql/src/sql/proof/final_round_builder.rs similarity index 93% rename from crates/proof-of-sql/src/sql/proof/proof_builder.rs rename to crates/proof-of-sql/src/sql/proof/final_round_builder.rs index 3b011aaa5..f3c6a1d39 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/final_round_builder.rs @@ -12,7 +12,7 @@ use alloc::{boxed::Box, vec, vec::Vec}; use num_traits::Zero; /// Track components used to form a query's proof -pub struct ProofBuilder<'a, S: Scalar> { +pub struct FinalRoundBuilder<'a, S: Scalar> { table_length: usize, num_sumcheck_variables: usize, bit_distributions: Vec, @@ -29,7 +29,7 @@ pub struct ProofBuilder<'a, S: Scalar> { post_result_challenges: Vec, } -impl<'a, S: Scalar> ProofBuilder<'a, S> { +impl<'a, S: Scalar> FinalRoundBuilder<'a, S> { pub fn new( table_length: usize, num_sumcheck_variables: usize, @@ -96,7 +96,7 @@ impl<'a, S: Scalar> ProofBuilder<'a, S> { /// Compute commitments of all the interemdiate MLEs used in sumcheck #[tracing::instrument( - name = "ProofBuilder::commit_intermediate_mles", 
+ name = "FinalRoundBuilder::commit_intermediate_mles", level = "debug", skip_all )] @@ -115,7 +115,7 @@ impl<'a, S: Scalar> ProofBuilder<'a, S> { /// Given random multipliers, construct an aggregatated sumcheck polynomial from all /// the individual subpolynomials. #[tracing::instrument( - name = "ProofBuilder::make_sumcheck_polynomial", + name = "FinalRoundBuilder::make_sumcheck_polynomial", level = "debug", skip_all )] @@ -140,7 +140,7 @@ impl<'a, S: Scalar> ProofBuilder<'a, S> { /// Given the evaluation vector, compute evaluations of all the MLEs used in sumcheck except /// for those that correspond to result columns sent to the verifier. #[tracing::instrument( - name = "ProofBuilder::evaluate_pcs_proof_mles", + name = "FinalRoundBuilder::evaluate_pcs_proof_mles", level = "debug", skip_all )] @@ -154,7 +154,11 @@ impl<'a, S: Scalar> ProofBuilder<'a, S> { /// Given random multipliers, multiply and add together all of the MLEs used in sumcheck except /// for those that correspond to result columns sent to the verifier. 
- #[tracing::instrument(name = "ProofBuilder::fold_pcs_proof_mles", level = "debug", skip_all)] + #[tracing::instrument( + name = "FinalRoundBuilder::fold_pcs_proof_mles", + level = "debug", + skip_all + )] pub fn fold_pcs_proof_mles(&self, multipliers: &[S]) -> Vec { assert_eq!(multipliers.len(), self.pcs_proof_mles.len()); let mut res = vec![Zero::zero(); self.table_length]; diff --git a/crates/proof-of-sql/src/sql/proof/proof_builder_test.rs b/crates/proof-of-sql/src/sql/proof/final_round_builder_test.rs similarity index 92% rename from crates/proof-of-sql/src/sql/proof/proof_builder_test.rs rename to crates/proof-of-sql/src/sql/proof/final_round_builder_test.rs index 788920551..204821772 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_builder_test.rs +++ b/crates/proof-of-sql/src/sql/proof/final_round_builder_test.rs @@ -1,4 +1,4 @@ -use super::{ProofBuilder, ProvableQueryResult, SumcheckRandomScalars}; +use super::{FinalRoundBuilder, ProvableQueryResult, SumcheckRandomScalars}; use crate::{ base::{ commitment::{Commitment, CommittableColumn}, @@ -22,7 +22,7 @@ use num_traits::{One, Zero}; fn we_can_compute_commitments_for_intermediate_mles_using_a_zero_offset() { let mle1 = [1, 2]; let mle2 = [10i64, 20]; - let mut builder = ProofBuilder::::new(2, 1, Vec::new()); + let mut builder = FinalRoundBuilder::::new(2, 1, Vec::new()); builder.produce_anchored_mle(&mle1); builder.produce_intermediate_mle(&mle2[..]); let offset_generators = 0_usize; @@ -41,7 +41,7 @@ fn we_can_compute_commitments_for_intermediate_mles_using_a_zero_offset() { fn we_can_compute_commitments_for_intermediate_mles_using_a_non_zero_offset() { let mle1 = [1, 2]; let mle2 = [10i64, 20]; - let mut builder = ProofBuilder::::new(2, 1, Vec::new()); + let mut builder = FinalRoundBuilder::::new(2, 1, Vec::new()); builder.produce_anchored_mle(&mle1); builder.produce_intermediate_mle(&mle2[..]); let offset_generators = 123_usize; @@ -60,7 +60,7 @@ fn 
we_can_compute_commitments_for_intermediate_mles_using_a_non_zero_offset() { fn we_can_evaluate_pcs_proof_mles() { let mle1 = [1, 2]; let mle2 = [10i64, 20]; - let mut builder = ProofBuilder::new(2, 1, Vec::new()); + let mut builder = FinalRoundBuilder::new(2, 1, Vec::new()); builder.produce_anchored_mle(&mle1); builder.produce_intermediate_mle(&mle2[..]); let evaluation_vec = [ @@ -80,7 +80,7 @@ fn we_can_form_an_aggregated_sumcheck_polynomial() { let mle1 = [1, 2, -1]; let mle2 = [10i64, 20, 100, 30]; let mle3 = [2000i64, 3000, 5000, 7000]; - let mut builder = ProofBuilder::new(4, 2, Vec::new()); + let mut builder = FinalRoundBuilder::new(4, 2, Vec::new()); builder.produce_anchored_mle(&mle1); builder.produce_intermediate_mle(&mle2[..]); builder.produce_intermediate_mle(&mle3[..]); @@ -170,7 +170,7 @@ fn we_can_form_the_provable_query_result() { fn we_can_fold_pcs_proof_mles() { let mle1 = [1, 2]; let mle2 = [10i64, 20]; - let mut builder = ProofBuilder::new(2, 1, Vec::new()); + let mut builder = FinalRoundBuilder::new(2, 1, Vec::new()); builder.produce_anchored_mle(&mle1); builder.produce_intermediate_mle(&mle2[..]); let multipliers = [Curve25519Scalar::from(100u64), Curve25519Scalar::from(2u64)]; @@ -184,7 +184,7 @@ fn we_can_fold_pcs_proof_mles() { #[test] fn we_can_consume_post_result_challenges_in_proof_builder() { - let mut builder = ProofBuilder::new( + let mut builder = FinalRoundBuilder::new( 0, 0, vec![ diff --git a/crates/proof-of-sql/src/sql/proof/mod.rs b/crates/proof-of-sql/src/sql/proof/mod.rs index 10de7a523..48139dc22 100644 --- a/crates/proof-of-sql/src/sql/proof/mod.rs +++ b/crates/proof-of-sql/src/sql/proof/mod.rs @@ -2,10 +2,10 @@ mod count_builder; pub(crate) use count_builder::CountBuilder; -mod proof_builder; -pub(crate) use proof_builder::ProofBuilder; +mod final_round_builder; +pub(crate) use final_round_builder::FinalRoundBuilder; #[cfg(all(test, feature = "blitzar"))] -mod proof_builder_test; +mod final_round_builder_test; mod 
composite_polynomial_builder; pub(crate) use composite_polynomial_builder::CompositePolynomialBuilder; diff --git a/crates/proof-of-sql/src/sql/proof/proof_plan.rs b/crates/proof-of-sql/src/sql/proof/proof_plan.rs index 841aadbbb..ea2e9e1b6 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof/proof_plan.rs @@ -1,4 +1,4 @@ -use super::{CountBuilder, FirstRoundBuilder, ProofBuilder, VerificationBuilder}; +use super::{CountBuilder, FinalRoundBuilder, FirstRoundBuilder, VerificationBuilder}; use crate::base::{ commitment::Commitment, database::{ @@ -57,7 +57,7 @@ pub trait ProverEvaluate { accessor: &'a dyn DataAccessor, ) -> Vec>; - /// Evaluate the query and modify `ProofBuilder` to store an intermediate representation + /// Evaluate the query and modify `FinalRoundBuilder` to store an intermediate representation /// of the query result and track all the components needed to form the query's proof. /// /// Intermediate values that are needed to form the proof are allocated into the arena @@ -65,7 +65,7 @@ pub trait ProverEvaluate { /// will be bulk deallocated once the proof is formed. 
fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec>; diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index 0ca7c7afe..e72d698c0 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -1,5 +1,5 @@ use super::{ - CountBuilder, ProofBuilder, ProofCounts, ProofPlan, ProvableQueryResult, QueryResult, + CountBuilder, FinalRoundBuilder, ProofCounts, ProofPlan, ProvableQueryResult, QueryResult, SumcheckMleEvaluations, SumcheckRandomScalars, VerificationBuilder, }; use crate::{ @@ -73,7 +73,7 @@ impl QueryProof { .collect(); let mut builder = - ProofBuilder::new(table_length, num_sumcheck_variables, post_result_challenges); + FinalRoundBuilder::new(table_length, num_sumcheck_variables, post_result_challenges); expr.prover_evaluate(&mut builder, &alloc, accessor); let num_sumcheck_variables = builder.num_sumcheck_variables(); diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 9c6ab7243..114ff5adf 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -1,5 +1,5 @@ use super::{ - CountBuilder, ProofBuilder, ProofPlan, ProverEvaluate, QueryProof, VerificationBuilder, + CountBuilder, FinalRoundBuilder, ProofPlan, ProverEvaluate, QueryProof, VerificationBuilder, }; use crate::{ base::{ @@ -55,7 +55,7 @@ impl ProverEvaluate for TrivialTestProofPlan { fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -211,7 +211,7 @@ impl ProverEvaluate for SquareTestProofPlan { fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, 
S>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -392,7 +392,7 @@ impl ProverEvaluate for DoubleSquareTestProofPlan { fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -603,7 +603,7 @@ impl ProverEvaluate for ChallengeTestProofPlan { fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs index 79cd74431..0e0b40cab 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs @@ -1,5 +1,5 @@ use super::{ - CountBuilder, ProofBuilder, ProofPlan, ProverEvaluate, VerifiableQueryResult, + CountBuilder, FinalRoundBuilder, ProofPlan, ProverEvaluate, VerifiableQueryResult, VerificationBuilder, }; use crate::{ @@ -38,7 +38,7 @@ impl ProverEvaluate for EmptyTestQueryExpr { } fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs index 2b7611c06..10a623f85 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/add_subtract_expr.rs @@ -9,7 +9,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -79,7 +79,7 @@ impl ProofExpr for AddSubtractExpr { )] fn prover_evaluate<'a>( &self, - builder: &mut 
ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs index ddeb08b77..d11c157b5 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/aggregate_expr.rs @@ -6,7 +6,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -55,7 +55,7 @@ impl ProofExpr for AggregateExpr { #[tracing::instrument(name = "AggregateExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs index 190e9f7a7..d1166733d 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs @@ -6,7 +6,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -60,7 +60,7 @@ impl ProofExpr for AndExpr { #[tracing::instrument(name = "AndExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git 
a/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs index 2a403b3d6..93b7be813 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/column_expr.rs @@ -6,7 +6,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use bumpalo::Bump; use core::marker::PhantomData; @@ -75,7 +75,7 @@ impl ProofExpr for ColumnExpr { /// add the components needed to prove the result fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, _alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs index eae4c37d3..88527ad60 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs @@ -11,7 +11,7 @@ use crate::{ }, sql::{ parse::{type_check_binary_operation, ConversionError, ConversionResult}, - proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }, }; use alloc::{boxed::Box, string::ToString}; @@ -253,7 +253,7 @@ impl ProofExpr for DynProofExpr { fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs index faf9af5e1..58cfa6155 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs @@ -8,7 +8,7 @@ use crate::{ scalar::Scalar, 
slice_ops, }, - sql::proof::{CountBuilder, ProofBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -59,7 +59,7 @@ impl ProofExpr for EqualsExpr { #[tracing::instrument(name = "EqualsExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { @@ -105,7 +105,7 @@ pub fn result_evaluate_equals_zero<'a, S: Scalar>( } pub fn prover_evaluate_equals_zero<'a, S: Scalar>( - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, lhs: &'a [S], ) -> &'a [bool] { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs index 06ce2a217..f1f647682 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/inequality_expr.rs @@ -11,7 +11,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use alloc::boxed::Box; use bumpalo::Bump; @@ -86,7 +86,7 @@ impl ProofExpr for InequalityExpr { #[tracing::instrument(name = "InequalityExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs index 4ea429cc5..c00af32d1 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs +++ 
b/crates/proof-of-sql/src/sql/proof_exprs/literal_expr.rs @@ -7,7 +7,7 @@ use crate::{ proof::ProofError, scalar::Scalar, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use bumpalo::Bump; use serde::{Deserialize, Serialize}; @@ -57,7 +57,7 @@ impl ProofExpr for LiteralExpr { #[tracing::instrument(name = "LiteralExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs index 391091745..9e3dad92e 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs @@ -10,7 +10,7 @@ use crate::{ proof::ProofError, }, sql::{ - proof::{CountBuilder, ProofBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + proof::{CountBuilder, FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, proof_exprs::multiply_columns, }, }; @@ -69,7 +69,7 @@ impl ProofExpr for MultiplyExpr { )] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs index c0f2f899f..194d5b9be 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/not_expr.rs @@ -6,7 +6,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use alloc::boxed::Box; use 
bumpalo::Bump; @@ -50,7 +50,7 @@ impl ProofExpr for NotExpr { #[tracing::instrument(name = "NotExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { diff --git a/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs index efdeb3d5a..e31c2a9c9 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs @@ -7,7 +7,7 @@ use crate::{ proof::ProofError, scalar::Scalar, }, - sql::proof::{CountBuilder, ProofBuilder, SumcheckSubpolynomialType, VerificationBuilder}, + sql::proof::{CountBuilder, FinalRoundBuilder, SumcheckSubpolynomialType, VerificationBuilder}, }; use alloc::{boxed::Box, vec}; use bumpalo::Bump; @@ -58,7 +58,7 @@ impl ProofExpr for OrExpr { #[tracing::instrument(name = "OrExpr::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar> { @@ -106,7 +106,7 @@ pub fn result_evaluate_or<'a>( reason = "lhs and rhs are guaranteed to have the same length, ensuring no panic occurs" )] pub fn prover_evaluate_or<'a, S: Scalar>( - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, lhs: &'a [bool], rhs: &'a [bool], diff --git a/crates/proof-of-sql/src/sql/proof_exprs/proof_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/proof_expr.rs index 681fd0194..88f215484 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/proof_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/proof_expr.rs @@ -5,7 +5,7 @@ use crate::{ map::IndexSet, proof::ProofError, }, - sql::proof::{CountBuilder, ProofBuilder, VerificationBuilder}, + 
sql::proof::{CountBuilder, FinalRoundBuilder, VerificationBuilder}, }; use bumpalo::Bump; use core::fmt::Debug; @@ -32,7 +32,7 @@ pub trait ProofExpr: Debug + Send + Sync { /// of values fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Column<'a, C::Scalar>; diff --git a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs index 9e614d22e..5dc668972 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs @@ -10,7 +10,7 @@ use crate::{ scalar::Scalar, }, sql::proof::{ - CountBuilder, ProofBuilder, SumcheckSubpolynomialTerm, SumcheckSubpolynomialType, + CountBuilder, FinalRoundBuilder, SumcheckSubpolynomialTerm, SumcheckSubpolynomialType, VerificationBuilder, }, }; @@ -80,7 +80,7 @@ pub fn result_evaluate_sign<'a, S: Scalar>( /// Note: We can only prove the sign bit for non-zero scalars, and we restict /// the range of non-zero scalar so that there is a unique sign representation. pub fn prover_evaluate_sign<'a, S: Scalar>( - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, expr: &'a [S], #[cfg(test)] treat_column_of_zeros_as_negative: bool, @@ -173,7 +173,10 @@ fn verifier_const_sign_evaluate( } } -fn prove_bits_are_binary<'a, S: Scalar>(builder: &mut ProofBuilder<'a, S>, bits: &[&'a [bool]]) { +fn prove_bits_are_binary<'a, S: Scalar>( + builder: &mut FinalRoundBuilder<'a, S>, + bits: &[&'a [bool]], +) { for &seq in bits { builder.produce_intermediate_mle(seq); builder.produce_sumcheck_subpolynomial( @@ -203,7 +206,7 @@ fn verify_bits_are_binary( /// /// This function generates subpolynomial terms for sumcheck, involving the scalar expression and its bit decomposition. 
fn prove_bit_decomposition<'a, S: Scalar>( - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, expr: &'a [S], bits: &[&'a [bool]], diff --git a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs index 7e24b66ae..61ec2402c 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr_test.rs @@ -2,7 +2,7 @@ use super::{count_sign, prover_evaluate_sign, result_evaluate_sign, verifier_eva use crate::{ base::{bit::BitDistribution, polynomial::MultilinearExtension, scalar::Curve25519Scalar}, sql::proof::{ - CountBuilder, ProofBuilder, SumcheckMleEvaluations, SumcheckRandomScalars, + CountBuilder, FinalRoundBuilder, SumcheckMleEvaluations, SumcheckRandomScalars, VerificationBuilder, }, }; @@ -16,7 +16,7 @@ fn prover_evaluation_generates_the_bit_distribution_of_a_constant_column() { let dist = BitDistribution::new::(&data); let alloc = Bump::new(); let data: Vec = data.into_iter().map(Curve25519Scalar::from).collect(); - let mut builder = ProofBuilder::new(3, 2, Vec::new()); + let mut builder = FinalRoundBuilder::new(3, 2, Vec::new()); let sign = prover_evaluate_sign(&mut builder, &alloc, &data, false); assert_eq!(sign, [false; 3]); assert_eq!(builder.bit_distributions(), [dist]); @@ -28,7 +28,7 @@ fn prover_evaluation_generates_the_bit_distribution_of_a_negative_constant_colum let dist = BitDistribution::new::(&data); let alloc = Bump::new(); let data: Vec = data.into_iter().map(Curve25519Scalar::from).collect(); - let mut builder = ProofBuilder::new(3, 2, Vec::new()); + let mut builder = FinalRoundBuilder::new(3, 2, Vec::new()); let sign = prover_evaluate_sign(&mut builder, &alloc, &data, false); assert_eq!(sign, [true; 3]); assert_eq!(builder.bit_distributions(), [dist]); diff --git a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs 
b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs index 92d99ddf9..b17349ef6 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs @@ -109,7 +109,7 @@ impl ProverEvaluate for DynProofPlan { #[tracing::instrument(name = "DynProofPlan::prover_evaluate", level = "debug", skip_all)] fn prover_evaluate<'a>( &self, - builder: &mut crate::sql::proof::ProofBuilder<'a, C::Scalar>, + builder: &mut crate::sql::proof::FinalRoundBuilder<'a, C::Scalar>, alloc: &'a bumpalo::Bump, accessor: &'a dyn crate::base::database::DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index dedd36db2..7cf51cffa 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -13,8 +13,8 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, FirstRoundBuilder, HonestProver, ProofBuilder, ProofPlan, ProverEvaluate, - ProverHonestyMarker, SumcheckSubpolynomialType, VerificationBuilder, + CountBuilder, FinalRoundBuilder, FirstRoundBuilder, HonestProver, ProofPlan, + ProverEvaluate, ProverHonestyMarker, SumcheckSubpolynomialType, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, DynProofExpr, ProofExpr, TableExpr}, }, @@ -183,7 +183,7 @@ impl ProverEvaluate for FilterExec { #[allow(unused_variables)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -263,7 +263,7 @@ fn verify_filter( #[allow(clippy::too_many_arguments, clippy::many_single_char_names)] pub(super) fn prove_filter<'a, S: Scalar + 'a>( - builder: &mut ProofBuilder<'a, S>, + builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, alpha: S, beta: S, diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs 
b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index a354c37b7..74bac6e40 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -8,7 +8,7 @@ use crate::{ }, sql::{ proof::{ - FirstRoundBuilder, ProofBuilder, ProverEvaluate, ProverHonestyMarker, QueryError, + FinalRoundBuilder, FirstRoundBuilder, ProverEvaluate, ProverHonestyMarker, QueryError, VerifiableQueryResult, }, // Making this explicit to ensure that we don't accidentally use the @@ -75,7 +75,7 @@ impl ProverEvaluate for DishonestFilterExec { #[allow(unused_variables)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, Curve25519Scalar>, + builder: &mut FinalRoundBuilder<'a, Curve25519Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index a19000ad1..3b835fd20 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -16,7 +16,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, FirstRoundBuilder, ProofBuilder, ProofPlan, ProverEvaluate, + CountBuilder, FinalRoundBuilder, FirstRoundBuilder, ProofPlan, ProverEvaluate, SumcheckSubpolynomialType, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, ColumnExpr, DynProofExpr, ProofExpr, TableExpr}, @@ -259,7 +259,7 @@ impl ProverEvaluate for GroupByExec { #[allow(unused_variables)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { @@ -365,7 +365,7 @@ fn verify_group_by( reason = "alpha is guaranteed to not be zero in this context" )] pub fn prove_group_by<'a, S: Scalar>( - builder: &mut ProofBuilder<'a, S>, + builder: &mut 
FinalRoundBuilder<'a, S>, alloc: &'a Bump, alpha: S, beta: S, diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index 6d28a154b..fda409950 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -10,7 +10,7 @@ use crate::{ }, sql::{ proof::{ - CountBuilder, FirstRoundBuilder, ProofBuilder, ProofPlan, ProverEvaluate, + CountBuilder, FinalRoundBuilder, FirstRoundBuilder, ProofPlan, ProverEvaluate, VerificationBuilder, }, proof_exprs::{AliasedDynProofExpr, ProofExpr, TableExpr}, @@ -121,7 +121,7 @@ impl ProverEvaluate for ProjectionExec { #[allow(unused_variables)] fn prover_evaluate<'a>( &self, - builder: &mut ProofBuilder<'a, C::Scalar>, + builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { From 8e69f9d97ae90ec5bb089bc018191c3164549b5a Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Fri, 11 Oct 2024 00:15:17 -0400 Subject: [PATCH 16/29] refactor!: split `first_round_evaluate` out of `ProofPlan::result_evaluate` - split out post result challenges into `ProofPlan::first_round_evaluate` - replace `FirstRoundBuilder` in `result_evaluate` with `input_length` --- .../src/sql/proof/first_round_builder.rs | 13 --------- .../proof-of-sql/src/sql/proof/proof_plan.rs | 5 +++- .../proof-of-sql/src/sql/proof/query_proof.rs | 17 +++++++---- .../src/sql/proof/query_proof_test.rs | 26 +++++++++-------- .../sql/proof/verifiable_query_result_test.rs | 4 +-- .../src/sql/proof_plans/dyn_proof_plan.rs | 16 ++++++++--- .../src/sql/proof_plans/filter_exec.rs | 11 ++++---- .../src/sql/proof_plans/filter_exec_test.rs | 28 ++++++++++++------- .../filter_exec_test_dishonest_prover.rs | 11 ++++---- .../src/sql/proof_plans/group_by_exec.rs | 9 +++--- .../src/sql/proof_plans/projection_exec.rs | 7 ++--- 
.../sql/proof_plans/projection_exec_test.rs | 20 ++++++++----- 12 files changed, 95 insertions(+), 72 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/first_round_builder.rs b/crates/proof-of-sql/src/sql/proof/first_round_builder.rs index 3d2e1df8f..88d2c025a 100644 --- a/crates/proof-of-sql/src/sql/proof/first_round_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/first_round_builder.rs @@ -1,7 +1,5 @@ /// Track the result created by a query pub struct FirstRoundBuilder { - result_table_length: usize, - /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to /// the prover after the prover sends the result, but before the prover @@ -19,21 +17,10 @@ impl FirstRoundBuilder { /// Create a new result builder for a table with the given length. For multi table queries, this will likely need to change. pub fn new() -> Self { Self { - result_table_length: 0, num_post_result_challenges: 0, } } - /// Get the length of the output table - pub fn result_table_length(&self) -> usize { - self.result_table_length - } - - /// Set the length of the output table - pub fn set_result_table_length(&mut self, result_table_length: usize) { - self.result_table_length = result_table_length; - } - /// The number of challenges used in the proof. /// Specifically, these are the challenges that the verifier sends to /// the prover after the prover sends the result, but before the prover diff --git a/crates/proof-of-sql/src/sql/proof/proof_plan.rs b/crates/proof-of-sql/src/sql/proof/proof_plan.rs index ea2e9e1b6..82a565081 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof/proof_plan.rs @@ -52,11 +52,14 @@ pub trait ProverEvaluate { /// Evaluate the query and modify `FirstRoundBuilder` to track the result of the query. 
fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + input_length: usize, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec>; + /// Evaluate the query and modify `FirstRoundBuilder` to form the query's proof. + fn first_round_evaluate(&self, builder: &mut FirstRoundBuilder); + /// Evaluate the query and modify `FinalRoundBuilder` to store an intermediate representation /// of the query result and track all the components needed to form the query's proof. /// diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index e72d698c0..27caad2b4 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -6,7 +6,7 @@ use crate::{ base::{ bit::BitDistribution, commitment::{Commitment, CommitmentEvaluationProof}, - database::{CommitmentAccessor, DataAccessor}, + database::{Column, CommitmentAccessor, DataAccessor}, math::log2_up, polynomial::{compute_evaluation_vector, CompositePolynomialInfo}, proof::{Keccak256Transcript, ProofError, Transcript}, @@ -53,10 +53,15 @@ impl QueryProof { assert!(num_sumcheck_variables > 0); let alloc = Bump::new(); - let mut result_builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut result_builder, &alloc, accessor); - let provable_result = - ProvableQueryResult::new(result_builder.result_table_length() as u64, &result_cols); + + // Evaluate query result + let result_cols = expr.result_evaluate(table_length, &alloc, accessor); + let output_length = result_cols.first().map_or(0, Column::len); + let provable_result = ProvableQueryResult::new(output_length as u64, &result_cols); + + // Prover First Round + let mut first_round_builder = FirstRoundBuilder::new(); + expr.first_round_evaluate(&mut first_round_builder); // construct a transcript for the proof let mut transcript: Keccak256Transcript = @@ -69,7 +74,7 @@ impl QueryProof { // Note: the last challenge in the vec is the 
first one that is consumed. let post_result_challenges = core::iter::repeat_with(|| transcript.scalar_challenge_as_be()) - .take(result_builder.num_post_result_challenges()) + .take(first_round_builder.num_post_result_challenges()) .collect(); let mut builder = diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 114ff5adf..9cba400dd 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -43,16 +43,16 @@ impl Default for TrivialTestProofPlan { impl ProverEvaluate for TrivialTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + _input_length: usize, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { - let input_length = self.length; - let col = alloc.alloc_slice_fill_copy(input_length, self.column_fill_value); - builder.set_result_table_length(input_length); + let col = alloc.alloc_slice_fill_copy(self.length, self.column_fill_value); vec![Column::BigInt(col)] } + fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} + fn prover_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, @@ -200,15 +200,16 @@ impl Default for SquareTestProofPlan { impl ProverEvaluate for SquareTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + _table_length: usize, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { let res: &[_] = alloc.alloc_slice_copy(&self.res); - builder.set_result_table_length(2); vec![Column::BigInt(res)] } + fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} + fn prover_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, @@ -381,15 +382,16 @@ impl Default for DoubleSquareTestProofPlan { impl ProverEvaluate for DoubleSquareTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + _input_length: usize, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { 
let res: &[_] = alloc.alloc_slice_copy(&self.res); - builder.set_result_table_length(2); vec![Column::BigInt(res)] } + fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} + fn prover_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, @@ -592,15 +594,17 @@ struct ChallengeTestProofPlan {} impl ProverEvaluate for ChallengeTestProofPlan { fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + _input_length: usize, _alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { - builder.request_post_result_challenges(2); - builder.set_result_table_length(2); vec![Column::BigInt(&[9, 25])] } + fn first_round_evaluate(&self, builder: &mut FirstRoundBuilder) { + builder.request_post_result_challenges(2); + } + fn prover_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs index 0e0b40cab..f899af5e8 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs @@ -27,15 +27,15 @@ pub(super) struct EmptyTestQueryExpr { impl ProverEvaluate for EmptyTestQueryExpr { fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + _input_length: usize, alloc: &'a Bump, _accessor: &'a dyn DataAccessor, ) -> Vec> { let zeros = vec![0; self.length]; let res: &[_] = alloc.alloc_slice_copy(&zeros); - builder.set_result_table_length(self.length); vec![Column::BigInt(res); self.columns] } + fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} fn prover_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, diff --git a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs index b17349ef6..6490effe3 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs +++ 
b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs @@ -95,14 +95,22 @@ impl ProverEvaluate for DynProofPlan { #[tracing::instrument(name = "DynProofPlan::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut crate::sql::proof::FirstRoundBuilder, + input_length: usize, alloc: &'a bumpalo::Bump, accessor: &'a dyn crate::base::database::DataAccessor, ) -> Vec> { match self { - DynProofPlan::Projection(expr) => expr.result_evaluate(builder, alloc, accessor), - DynProofPlan::GroupBy(expr) => expr.result_evaluate(builder, alloc, accessor), - DynProofPlan::Filter(expr) => expr.result_evaluate(builder, alloc, accessor), + DynProofPlan::Projection(expr) => expr.result_evaluate(input_length, alloc, accessor), + DynProofPlan::GroupBy(expr) => expr.result_evaluate(input_length, alloc, accessor), + DynProofPlan::Filter(expr) => expr.result_evaluate(input_length, alloc, accessor), + } + } + + fn first_round_evaluate(&self, builder: &mut crate::sql::proof::FirstRoundBuilder) { + match self { + DynProofPlan::Projection(expr) => expr.first_round_evaluate(builder), + DynProofPlan::GroupBy(expr) => expr.first_round_evaluate(builder), + DynProofPlan::Filter(expr) => expr.first_round_evaluate(builder), } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index 7cf51cffa..f2ab08115 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -148,11 +148,10 @@ impl ProverEvaluate for FilterExec { #[tracing::instrument(name = "FilterExec::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + input_length: usize, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { - let input_length = accessor.get_length(self.table.table_ref); // 1. 
selection let selection_column: Column<'a, C::Scalar> = self.where_clause @@ -173,12 +172,14 @@ impl ProverEvaluate for FilterExec { .collect(); // Compute filtered_columns and indexes - let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - builder.set_result_table_length(result_len); - builder.request_post_result_challenges(2); + let (filtered_columns, _) = filter_columns(alloc, &columns, selection); filtered_columns } + fn first_round_evaluate(&self, builder: &mut FirstRoundBuilder) { + builder.request_post_result_challenges(2); + } + #[tracing::instrument(name = "FilterExec::prover_evaluate", level = "debug", skip_all)] #[allow(unused_variables)] fn prover_evaluate<'a>( diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs index 117d52f89..c6252d133 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test.rs @@ -2,8 +2,8 @@ use super::{test_utility::*, FilterExec}; use crate::{ base::{ database::{ - owned_table_utility::*, ColumnField, ColumnRef, ColumnType, LiteralValue, OwnedTable, - OwnedTableTestAccessor, TableRef, TestAccessor, + owned_table_utility::*, Column, ColumnField, ColumnRef, ColumnType, LiteralValue, + OwnedTable, OwnedTableTestAccessor, TableRef, TestAccessor, }, map::{IndexMap, IndexSet}, math::decimal::Precision, @@ -192,8 +192,10 @@ fn we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result where_clause, ); let alloc = Bump::new(); + let result_cols = expr.result_evaluate(0, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), 
ColumnField::new("c".parse().unwrap(), ColumnType::Int128), @@ -204,7 +206,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_on_an_empty_table_using_result ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -237,8 +239,10 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { where_clause, ); let alloc = Bump::new(); + let result_cols = expr.result_evaluate(5, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), ColumnField::new("c".parse().unwrap(), ColumnType::Int128), @@ -249,7 +253,7 @@ fn we_can_get_an_empty_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -278,11 +282,13 @@ fn we_can_get_no_columns_from_a_basic_filter_with_no_selected_columns_using_resu equal(column(t, "a", &accessor), const_int128(5)); let expr = filter(cols_expr_plan(t, &[], &accessor), tab(t), where_clause); let alloc = Bump::new(); + let result_cols = expr.result_evaluate(5, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + 
ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -309,8 +315,10 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { where_clause, ); let alloc = Bump::new(); + let result_cols = expr.result_evaluate(5, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), ColumnField::new("c".parse().unwrap(), ColumnType::Int128), @@ -321,7 +329,7 @@ fn we_can_get_the_correct_result_from_a_basic_filter_using_result_evaluate() { ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index 74bac6e40..55930d36a 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -37,11 +37,10 @@ impl ProverEvaluate for DishonestFilterExec { )] fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + input_length: usize, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { - let input_length = accessor.get_length(self.table.table_ref); // 1. 
selection let selection_column: Column<'a, Curve25519Scalar> = self.where_clause @@ -60,13 +59,15 @@ impl ProverEvaluate for DishonestFilterExec { }) .collect(); // Compute filtered_columns - let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection); - builder.set_result_table_length(result_len); + let (filtered_columns, _) = filter_columns(alloc, &columns, selection); let filtered_columns = tamper_column(alloc, filtered_columns); - builder.request_post_result_challenges(2); filtered_columns } + fn first_round_evaluate(&self, builder: &mut FirstRoundBuilder) { + builder.request_post_result_challenges(2); + } + #[tracing::instrument( name = "DishonestFilterExec::prover_evaluate", level = "debug", diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index 3b835fd20..f50d1ede5 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -208,11 +208,10 @@ impl ProverEvaluate for GroupByExec { #[tracing::instrument(name = "GroupByExec::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + input_length: usize, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { - let input_length = accessor.get_length(self.table.table_ref); // 1. 
selection let selection_column: Column<'a, C::Scalar> = self.where_clause @@ -246,8 +245,6 @@ impl ProverEvaluate for GroupByExec { } = aggregate_columns(alloc, &group_by_columns, &sum_columns, &[], &[], selection) .expect("columns should be aggregatable"); let sum_result_columns_iter = sum_result_columns.iter().map(|col| Column::Scalar(col)); - builder.set_result_table_length(count_column.len()); - builder.request_post_result_challenges(2); group_by_result_columns .into_iter() .chain(sum_result_columns_iter) @@ -255,6 +252,10 @@ impl ProverEvaluate for GroupByExec { .collect::>() } + fn first_round_evaluate(&self, builder: &mut FirstRoundBuilder) { + builder.request_post_result_challenges(2); + } + #[tracing::instrument(name = "GroupByExec::prover_evaluate", level = "debug", skip_all)] #[allow(unused_variables)] fn prover_evaluate<'a>( diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index fda409950..1a888d6e2 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -98,11 +98,10 @@ impl ProverEvaluate for ProjectionExec { #[tracing::instrument(name = "ProjectionExec::result_evaluate", level = "debug", skip_all)] fn result_evaluate<'a>( &self, - builder: &mut FirstRoundBuilder, + input_length: usize, alloc: &'a Bump, accessor: &'a dyn DataAccessor, ) -> Vec> { - let input_length = accessor.get_length(self.table.table_ref); let columns: Vec<_> = self .aliased_results .iter() @@ -112,11 +111,11 @@ impl ProverEvaluate for ProjectionExec { .result_evaluate(input_length, alloc, accessor) }) .collect(); - // For projection, the result table length is the same as the input table length - builder.set_result_table_length(input_length); columns } + fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} + #[tracing::instrument(name = "ProjectionExec::prover_evaluate", level = "debug", skip_all)] 
#[allow(unused_variables)] fn prover_evaluate<'a>( diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs index 7e649a12d..3addcfb17 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec_test.rs @@ -2,7 +2,7 @@ use super::{test_utility::*, DynProofPlan, ProjectionExec}; use crate::{ base::{ database::{ - owned_table_utility::*, ColumnField, ColumnRef, ColumnType, OwnedTable, + owned_table_utility::*, Column, ColumnField, ColumnRef, ColumnType, OwnedTable, OwnedTableTestAccessor, TableRef, TestAccessor, }, map::{IndexMap, IndexSet}, @@ -165,8 +165,10 @@ fn we_can_get_an_empty_result_from_a_basic_projection_on_an_empty_table_using_re let expr: DynProofPlan = projection(cols_expr_plan(t, &["b", "c", "d", "e"], &accessor), tab(t)); let alloc = Bump::new(); + let result_cols = expr.result_evaluate(0, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), ColumnField::new("c".parse().unwrap(), ColumnType::Int128), @@ -177,7 +179,7 @@ fn we_can_get_an_empty_result_from_a_basic_projection_on_an_empty_table_using_re ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ @@ -204,11 +206,13 @@ fn we_can_get_no_columns_from_a_basic_projection_with_no_selected_columns_using_ accessor.add_table(t, data, 0); let expr: DynProofPlan = projection(cols_expr_plan(t, &[], &accessor), tab(t)); let alloc = Bump::new(); + let result_cols = 
expr.result_evaluate(5, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected = OwnedTable::try_new(IndexMap::default()).unwrap(); @@ -240,8 +244,10 @@ fn we_can_get_the_correct_result_from_a_basic_projection_using_result_evaluate() tab(t), ); let alloc = Bump::new(); + let result_cols = expr.result_evaluate(5, &alloc, &accessor); + let output_length = result_cols.first().map_or(0, Column::len) as u64; let mut builder = FirstRoundBuilder::new(); - let result_cols = expr.result_evaluate(&mut builder, &alloc, &accessor); + expr.first_round_evaluate(&mut builder); let fields = &[ ColumnField::new("b".parse().unwrap(), ColumnType::BigInt), ColumnField::new("prod".parse().unwrap(), ColumnType::Int128), @@ -252,7 +258,7 @@ fn we_can_get_the_correct_result_from_a_basic_projection_using_result_evaluate() ), ]; let res: OwnedTable = - ProvableQueryResult::new(builder.result_table_length() as u64, &result_cols) + ProvableQueryResult::new(output_length as u64, &result_cols) .to_owned_table(fields) .unwrap(); let expected: OwnedTable = owned_table([ From 42f3069a1115c4302ba8b43935297165e3aa73ef Mon Sep 17 00:00:00 2001 From: Ian Joiner <14581281+iajoiner@users.noreply.github.com> Date: Fri, 11 Oct 2024 00:31:01 -0400 Subject: [PATCH 17/29] refactor!: rename `ProverEvaluate::prover_evaluate` to `final_round_evaluate` --- crates/proof-of-sql/src/sql/proof/proof_plan.rs | 2 +- crates/proof-of-sql/src/sql/proof/query_proof.rs | 2 +- crates/proof-of-sql/src/sql/proof/query_proof_test.rs | 8 ++++---- .../src/sql/proof/verifiable_query_result_test.rs | 2 +- 
.../proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs | 10 +++++----- crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs | 4 ++-- .../proof_plans/filter_exec_test_dishonest_prover.rs | 4 ++-- .../proof-of-sql/src/sql/proof_plans/group_by_exec.rs | 4 ++-- .../src/sql/proof_plans/projection_exec.rs | 8 ++++++-- 9 files changed, 24 insertions(+), 20 deletions(-) diff --git a/crates/proof-of-sql/src/sql/proof/proof_plan.rs b/crates/proof-of-sql/src/sql/proof/proof_plan.rs index 82a565081..430485308 100644 --- a/crates/proof-of-sql/src/sql/proof/proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof/proof_plan.rs @@ -66,7 +66,7 @@ pub trait ProverEvaluate { /// Intermediate values that are needed to form the proof are allocated into the arena /// allocator alloc. These intermediate values will persist through proof creation and /// will be bulk deallocated once the proof is formed. - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, diff --git a/crates/proof-of-sql/src/sql/proof/query_proof.rs b/crates/proof-of-sql/src/sql/proof/query_proof.rs index 27caad2b4..62f3ada00 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof.rs @@ -79,7 +79,7 @@ impl QueryProof { let mut builder = FinalRoundBuilder::new(table_length, num_sumcheck_variables, post_result_challenges); - expr.prover_evaluate(&mut builder, &alloc, accessor); + expr.final_round_evaluate(&mut builder, &alloc, accessor); let num_sumcheck_variables = builder.num_sumcheck_variables(); let table_length = builder.table_length(); diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 9cba400dd..684cde01e 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -53,7 +53,7 @@ impl ProverEvaluate for TrivialTestProofPlan { fn 
first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, @@ -210,7 +210,7 @@ impl ProverEvaluate for SquareTestProofPlan { fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, @@ -392,7 +392,7 @@ impl ProverEvaluate for DoubleSquareTestProofPlan { fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, @@ -605,7 +605,7 @@ impl ProverEvaluate for ChallengeTestProofPlan { builder.request_post_result_challenges(2); } - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs index f899af5e8..5d299e408 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test.rs @@ -36,7 +36,7 @@ impl ProverEvaluate for EmptyTestQueryExpr { vec![Column::BigInt(res); self.columns] } fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, S>, alloc: &'a Bump, diff --git a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs index 6490effe3..c524a2c76 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/dyn_proof_plan.rs @@ -114,17 +114,17 @@ impl ProverEvaluate for DynProofPlan { } } - #[tracing::instrument(name = "DynProofPlan::prover_evaluate", level = "debug", 
skip_all)] - fn prover_evaluate<'a>( + #[tracing::instrument(name = "DynProofPlan::final_round_evaluate", level = "debug", skip_all)] + fn final_round_evaluate<'a>( &self, builder: &mut crate::sql::proof::FinalRoundBuilder<'a, C::Scalar>, alloc: &'a bumpalo::Bump, accessor: &'a dyn crate::base::database::DataAccessor, ) -> Vec> { match self { - DynProofPlan::Projection(expr) => expr.prover_evaluate(builder, alloc, accessor), - DynProofPlan::GroupBy(expr) => expr.prover_evaluate(builder, alloc, accessor), - DynProofPlan::Filter(expr) => expr.prover_evaluate(builder, alloc, accessor), + DynProofPlan::Projection(expr) => expr.final_round_evaluate(builder, alloc, accessor), + DynProofPlan::GroupBy(expr) => expr.final_round_evaluate(builder, alloc, accessor), + DynProofPlan::Filter(expr) => expr.final_round_evaluate(builder, alloc, accessor), } } } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index f2ab08115..28895df61 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -180,9 +180,9 @@ impl ProverEvaluate for FilterExec { builder.request_post_result_challenges(2); } - #[tracing::instrument(name = "FilterExec::prover_evaluate", level = "debug", skip_all)] + #[tracing::instrument(name = "FilterExec::final_round_evaluate", level = "debug", skip_all)] #[allow(unused_variables)] - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs index 55930d36a..4d32bc735 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec_test_dishonest_prover.rs @@ -69,12 +69,12 @@ impl ProverEvaluate for 
DishonestFilterExec { } #[tracing::instrument( - name = "DishonestFilterExec::prover_evaluate", + name = "DishonestFilterExec::final_round_evaluate", level = "debug", skip_all )] #[allow(unused_variables)] - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, Curve25519Scalar>, alloc: &'a Bump, diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index f50d1ede5..069aa08d1 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -256,9 +256,9 @@ impl ProverEvaluate for GroupByExec { builder.request_post_result_challenges(2); } - #[tracing::instrument(name = "GroupByExec::prover_evaluate", level = "debug", skip_all)] + #[tracing::instrument(name = "GroupByExec::final_round_evaluate", level = "debug", skip_all)] #[allow(unused_variables)] - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, diff --git a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs index 1a888d6e2..fb66bff00 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/projection_exec.rs @@ -116,9 +116,13 @@ impl ProverEvaluate for ProjectionExec { fn first_round_evaluate(&self, _builder: &mut FirstRoundBuilder) {} - #[tracing::instrument(name = "ProjectionExec::prover_evaluate", level = "debug", skip_all)] + #[tracing::instrument( + name = "ProjectionExec::final_round_evaluate", + level = "debug", + skip_all + )] #[allow(unused_variables)] - fn prover_evaluate<'a>( + fn final_round_evaluate<'a>( &self, builder: &mut FinalRoundBuilder<'a, C::Scalar>, alloc: &'a Bump, From 98452c4f1c91c50c30db0491a85f953e605aaaa5 Mon Sep 17 00:00:00 2001 From: Vamshi Maskuri 
<117595548+varshith257@users.noreply.github.com> Date: Fri, 11 Oct 2024 12:16:10 +0530 Subject: [PATCH 18/29] ci: add code coverage workflow with llvm-cov (#246) # Rationale for this change Coverage reports help a lot with code quality and review. # What changes are included in this PR? * Added llvm-cov workflow. # Are these changes tested? Yes. --------- Co-authored-by: Jay White --- .github/workflows/lint-and-test.yml | 35 +++++++++++++++++++++++++++++ CONTRIBUTING.md | 8 ++++++- 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint-and-test.yml b/.github/workflows/lint-and-test.yml index 3cb23d904..d3a21d6ce 100644 --- a/.github/workflows/lint-and-test.yml +++ b/.github/workflows/lint-and-test.yml @@ -136,6 +136,41 @@ jobs: - name: Run clippy::pedantic for proof-of-sql-parser run: cargo clippy --lib -p proof-of-sql-parser -- -D clippy::pedantic + coverage: + name: Code Coverage + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v3 + - name: Install Dependencies + run: sudo apt-get update && sudo apt-get install -y clang lld + - uses: taiki-e/install-action@cargo-llvm-cov + - name: Clean Previous Coverage Artifacts + run: cargo llvm-cov clean --workspace + - name: Run Tests to Generate Coverage Data (All Features) + run: cargo llvm-cov --no-report --all-features + #- name: Run Tests to Generate Coverage Data (Rayon Only) + # run: cargo llvm-cov --no-report --no-default-features --features="rayon" + #- name: Run Tests to Generate Coverage Data (Blitzar Only) + # run: cargo llvm-cov --no-report --no-default-features --features="blitzar" + #- name: Run Tests to Generate Coverage Data (std only) + # run: cargo llvm-cov --no-report --no-default-features --features="std" + - name: Generate Final LCOV Report (Merged Coverage) + run: cargo llvm-cov report --summary-only --fail-under-lines 90 + # Future CodeCov Integration + # To integrate with CodeCov in the future, follow these steps: + # 1. 
Add the CodeCov token to the repository secrets. + # 2. Use the CodeCov Action to upload the coverage report. For more detailed info refer to [CodeCov Documentation](https://docs.codecov.com/docs). + # + # - name: Generate Final LCOV Report (Merged Coverage) + # run: cargo llvm-cov report --lcov --output-path lcov.info --fail-under-lines 95 + # - name: Upload Coverage to Codecov + # uses: codecov/codecov-action@v2 + # with: + # token: ${{ secrets.CODECOV_TOKEN }} + # files: lcov.info + # fail_ci_if_error: true + # Run cargo fmt --all -- --config imports_granularity=Crate,group_imports=One --check format: name: Format diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8e80a76e6..add16e0da 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -14,6 +14,7 @@ The following guideline is heavily based on the [Angular Project Guideline](http - [Submitting a Pull Request (PR)](#submit-pr) - [Addressing review feedback](#address-review) - [Updating the commit message](#updating-commit-message) + - [Running Code Coverage Locally](#coverage-locally) - [After your pull request is merged](#after-pr-merged) - [Coding Rules](#rules) - [Commit Message Guidelines](#commit-guidelines) @@ -246,6 +247,12 @@ In order to update the commit message of the last commit on your branch: NOTE: If you need to update the commit message of an earlier commit, you can use `git rebase` in interactive mode. See the [git docs](https://git-scm.com/docs/git-rebase#_interactive_mode) for more details. +### Running Code Coverage Locally +To run code coverage locally, install `cargo-llvm-cov` by following the instructions here: [cargo-llvm-cov Installation](https://github.com/taiki-e/cargo-llvm-cov). + +For users of VSCode, you can display coverage reports directly in the IDE by following these instructions: [Display Coverage in VSCode](https://github.com/taiki-e/cargo-llvm-cov?tab=readme-ov-file#display-coverage-in-vs-code). 
+ +You can exclude specific functions from coverage by adding an attribute to your code: [Exclude Functions from Coverage](https://github.com/taiki-e/cargo-llvm-cov?tab=readme-ov-file#exclude-function-from-coverage). ### After your pull request is merged @@ -275,7 +282,6 @@ After your pull request is merged, you can safely delete your branch and pull th git pull --ff upstream main ``` - ## Coding Rules To ensure consistency throughout the source code, keep these rules in mind as you are working: From 40f1f77ee64597ca2ed23f107b1c20f89601d034 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Fri, 11 Oct 2024 18:07:59 +0530 Subject: [PATCH 19/29] chore: resolved some `clippy::pedantic` lints (#260) # Rationale for this change We have cargo clippy running in our CI in order to enforce code quality. In order to increase our standards, we should enable the clippy::pedantic lint group. # What changes are included in this PR? Resolved the following lint warnings `bool_to_int_with_if` `ptr_as_ptr` `match_wildcard_for_single_variants` `match_bool` `manual_assert` `trivially_copy_pass_by_ref` # Are these changes tested? Yes. 
--- Cargo.toml | 6 ++++++ .../commitment_evaluation_proof_test.rs | 2 +- .../base/polynomial/multilinear_extension.rs | 2 +- .../src/base/scalar/mont_scalar.rs | 16 ++++++++-------- .../dory/dynamic_dory_standard_basis_helper.rs | 9 ++++++--- .../proof_primitive/sumcheck/prover_round.rs | 15 +++++++++------ .../proof_primitive/sumcheck/prover_state.rs | 7 ++++--- .../src/sql/parse/dyn_proof_expr_builder.rs | 1 + .../src/sql/parse/query_context_builder.rs | 18 +++++++++--------- .../verifiable_query_result_test_utility.rs | 8 +++++--- 10 files changed, 50 insertions(+), 34 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 64113f778..feee15c25 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -91,3 +91,9 @@ struct_field_names = "deny" unicode_not_nfc = "deny" manual_string_new = "deny" large_types_passed_by_value = "deny" +bool_to_int_with_if = "deny" +ptr_as_ptr = "deny" +match_wildcard_for_single_variants = "deny" +match_bool = "deny" +manual_assert = "deny" +trivially_copy_pass_by_ref = "deny" \ No newline at end of file diff --git a/crates/proof-of-sql/src/base/commitment/commitment_evaluation_proof_test.rs b/crates/proof-of-sql/src/base/commitment/commitment_evaluation_proof_test.rs index 89e4c2a15..fe151feca 100644 --- a/crates/proof-of-sql/src/base/commitment/commitment_evaluation_proof_test.rs +++ b/crates/proof-of-sql/src/base/commitment/commitment_evaluation_proof_test.rs @@ -123,7 +123,7 @@ pub fn test_random_commitment_evaluation_proof( assert!(r.is_err(), "verification improperly succeeded"); // Invalid offset - let wrong_offset = if offset == 0 { 1 } else { 0 }; + let wrong_offset = u64::from(offset == 0); let mut transcript = Transcript::new(b"evaluation_proof"); let r = proof.verify_proof( &mut transcript, diff --git a/crates/proof-of-sql/src/base/polynomial/multilinear_extension.rs b/crates/proof-of-sql/src/base/polynomial/multilinear_extension.rs index 1668bac98..99720707c 100644 --- a/crates/proof-of-sql/src/base/polynomial/multilinear_extension.rs 
+++ b/crates/proof-of-sql/src/base/polynomial/multilinear_extension.rs @@ -57,7 +57,7 @@ where } fn id(&self) -> *const c_void { - self.as_ptr() as *const c_void + self.as_ptr().cast::() } } diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar.rs index 0a4ae8f6d..6c646b9db 100644 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar.rs +++ b/crates/proof-of-sql/src/base/scalar/mont_scalar.rs @@ -363,15 +363,15 @@ impl> From<&MontScalar> for [u64; 4] { impl> Display for MontScalar { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - let sign = match f.sign_plus() { - true => { - let n = -self; - match self > &n { - true => Some(Some(n)), - false => Some(None), - } + let sign = if f.sign_plus() { + let n = -self; + if self > &n { + Some(Some(n)) + } else { + Some(None) } - false => None, + } else { + None }; match (f.alternate(), sign) { (false, None) => { diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_standard_basis_helper.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_standard_basis_helper.rs index 6e0cc4ed7..3b9e0e0fe 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_standard_basis_helper.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_standard_basis_helper.rs @@ -290,9 +290,12 @@ pub(super) mod tests { k => Some(k), }) .enumerate() - .filter_map(|(i, b)| match b % 2 == 0 { - true => None, - false => Some(point.get(i).copied().unwrap_or(F::ZERO)), + .filter_map(|(i, b)| { + if b % 2 == 0 { + None + } else { + Some(point.get(i).copied().unwrap_or(F::ZERO)) + } }) .product() } diff --git a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs index 6f5f4310c..9ceb42b00 100644 --- a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs +++ b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_round.rs @@ -14,9 +14,11 @@ use 
rayon::prelude::*; #[tracing::instrument(level = "debug", skip_all)] pub fn prove_round(prover_state: &mut ProverState, r_maybe: &Option) -> Vec { if let Some(r) = r_maybe { - if prover_state.round == 0 { - panic!("first round should be prover first."); - } + assert!( + prover_state.round != 0, + "first round should be prover first." + ); + prover_state.randomness.push(*r); // fix argument @@ -38,9 +40,10 @@ pub fn prove_round(prover_state: &mut ProverState, r_maybe: &Optio prover_state.round += 1; - if prover_state.round > prover_state.num_vars { - panic!("Prover is not active"); - } + assert!( + prover_state.round <= prover_state.num_vars, + "Prover is not active" + ); let degree = prover_state.max_multiplicands; // the degree of univariate polynomial sent by prover at this round let round_length = 1usize << (prover_state.num_vars - prover_state.round); diff --git a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs index 2f9da11c0..44138378c 100644 --- a/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs +++ b/crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs @@ -23,9 +23,10 @@ pub struct ProverState { impl ProverState { #[tracing::instrument(name = "ProverState::create", level = "debug", skip_all)] pub fn create(polynomial: &CompositePolynomial) -> Self { - if polynomial.num_variables == 0 { - panic!("Attempt to prove a constant.") - } + assert!( + polynomial.num_variables != 0, + "Attempt to prove a constant." 
+ ); // create a deep copy of all unique MLExtensions let flattened_ml_extensions = polynomial diff --git a/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs b/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs index 0a4b44c81..fc2c90a02 100644 --- a/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/dyn_proof_expr_builder.rs @@ -49,6 +49,7 @@ impl<'a> DynProofExprBuilder<'a> { } } +#[allow(clippy::match_wildcard_for_single_variants)] // Private interface impl DynProofExprBuilder<'_> { fn visit_expr( diff --git a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs index 2c91ce6be..99ff7b094 100644 --- a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs @@ -133,9 +133,9 @@ impl<'a> QueryContextBuilder<'a> { Expression::Wildcard => Ok(ColumnType::BigInt), // Since COUNT(*) = COUNT(1) Expression::Literal(literal) => self.visit_literal(literal), Expression::Column(_) => self.visit_column_expr(expr), - Expression::Unary { op, expr } => self.visit_unary_expr(op, expr), - Expression::Binary { op, left, right } => self.visit_binary_expr(op, left, right), - Expression::Aggregation { op, expr } => self.visit_agg_expr(op, expr), + Expression::Unary { op, expr } => self.visit_unary_expr(*op, expr), + Expression::Binary { op, left, right } => self.visit_binary_expr(*op, left, right), + Expression::Aggregation { op, expr } => self.visit_agg_expr(*op, expr), } } @@ -152,13 +152,13 @@ impl<'a> QueryContextBuilder<'a> { fn visit_binary_expr( &mut self, - op: &BinaryOperator, + op: BinaryOperator, left: &Expression, right: &Expression, ) -> ConversionResult { let left_dtype = self.visit_expr(left)?; let right_dtype = self.visit_expr(right)?; - check_dtypes(left_dtype, right_dtype, *op)?; + check_dtypes(left_dtype, right_dtype, op)?; match op { BinaryOperator::And | 
BinaryOperator::Or @@ -174,7 +174,7 @@ impl<'a> QueryContextBuilder<'a> { fn visit_unary_expr( &mut self, - op: &UnaryOperator, + op: UnaryOperator, expr: &Expression, ) -> ConversionResult { match op { @@ -193,7 +193,7 @@ impl<'a> QueryContextBuilder<'a> { fn visit_agg_expr( &mut self, - op: &AggregationOperator, + op: AggregationOperator, expr: &Expression, ) -> ConversionResult { self.context.set_in_agg_scope(true)?; @@ -201,7 +201,7 @@ impl<'a> QueryContextBuilder<'a> { let expr_dtype = self.visit_expr(expr)?; // We only support sum/max/min aggregations on numeric columns. - if op != &AggregationOperator::Count && expr_dtype == ColumnType::VarChar { + if op != AggregationOperator::Count && expr_dtype == ColumnType::VarChar { return Err(ConversionError::non_numeric_expr_in_agg( expr_dtype.to_string(), op.to_string(), @@ -211,7 +211,7 @@ impl<'a> QueryContextBuilder<'a> { self.context.set_in_agg_scope(false)?; // Count aggregation always results in an integer type - if op == &AggregationOperator::Count { + if op == AggregationOperator::Count { Ok(ColumnType::BigInt) } else { Ok(expr_dtype) diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs index 3616af825..d87431909 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result_test_utility.rs @@ -31,9 +31,11 @@ pub fn exercise_verification( table_ref: TableRef, ) { let verification_result = res.verify(expr, accessor, &()); - if verification_result.is_err() { - panic!("Verification failed: {:?}", verification_result.err()); - } + assert!( + verification_result.is_ok(), + "Verification failed: {:?}", + verification_result.err() + ); // try changing the result tamper_result(res, expr, accessor); From d79cbbf9036891f3c0aded15f6928a91fc0bca38 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Fri, 11 Oct 2024 18:28:19 
+0530 Subject: [PATCH 20/29] chore: major resolved `if_not_else` and `explicit_deref_methods` lints (#262) # Rationale for this change We have cargo clippy running in our CI in order to enforce code quality. In order to increase our standards, we should enable the clippy::pedantic lint group. # What changes are included in this PR? Resolved the following lint warnings `large_types_passed_by_value ` `map_unwrap_or` `if_not_else ` `explicit_deref_methods` `items_after_statements` # Are these changes tested? Yes. --------- Co-authored-by: Jay White --- Cargo.toml | 8 ++- .../proof-of-sql/benches/bench_append_rows.rs | 27 ++++------ crates/proof-of-sql/benches/jaeger_benches.rs | 1 + .../proof-of-sql/src/base/math/permutation.rs | 6 +-- .../src/base/slice_ops/batch_inverse_test.rs | 18 +++---- .../postprocessing/group_by_postprocessing.rs | 21 ++++---- .../src/sql/proof_exprs/dyn_proof_expr.rs | 54 +++++++++---------- 7 files changed, 65 insertions(+), 70 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index feee15c25..cb9baf16b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -90,10 +90,14 @@ manual_let_else = "deny" struct_field_names = "deny" unicode_not_nfc = "deny" manual_string_new = "deny" -large_types_passed_by_value = "deny" +large_types_passed_by_value = "deny" +map_unwrap_or = "deny" +if_not_else = "deny" +explicit_deref_methods = "deny" +items_after_statements = "deny" bool_to_int_with_if = "deny" ptr_as_ptr = "deny" match_wildcard_for_single_variants = "deny" match_bool = "deny" manual_assert = "deny" -trivially_copy_pass_by_ref = "deny" \ No newline at end of file +trivially_copy_pass_by_ref = "deny" diff --git a/crates/proof-of-sql/benches/bench_append_rows.rs b/crates/proof-of-sql/benches/bench_append_rows.rs index 81855cd05..462ce404a 100644 --- a/crates/proof-of-sql/benches/bench_append_rows.rs +++ b/crates/proof-of-sql/benches/bench_append_rows.rs @@ -26,7 +26,6 @@ use proof_of_sql::{ }; use proof_of_sql_parser::posql_time::{PoSQLTimeUnit, 
PoSQLTimeZone}; use rand::Rng; -use std::ops::Deref; /// Bench dory performance when appending rows to a table. This includes the computation of /// commitments. Chose the number of columns to randomly generate across supported `PoSQL` @@ -99,34 +98,28 @@ pub fn generate_random_owned_table( let identifier = format!("column_{}", rng.gen::()); match column_type { - "bigint" => columns.push(bigint(identifier.deref(), vec![rng.gen::(); num_rows])), + "bigint" => columns.push(bigint(&*identifier, vec![rng.gen::(); num_rows])), "boolean" => columns.push(boolean( - identifier.deref(), + &*identifier, generate_random_boolean_vector(num_rows), )), - "int128" => columns.push(int128( - identifier.deref(), - vec![rng.gen::(); num_rows], - )), + "int128" => columns.push(int128(&*identifier, vec![rng.gen::(); num_rows])), "scalar" => columns.push(scalar( - identifier.deref(), + &*identifier, vec![generate_random_u64_array(); num_rows], )), - "varchar" => columns.push(varchar(identifier.deref(), gen_rnd_str(num_rows))), + "varchar" => columns.push(varchar(&*identifier, gen_rnd_str(num_rows))), "decimal75" => columns.push(decimal75( - identifier.deref(), + &*identifier, 12, 2, vec![generate_random_u64_array(); num_rows], )), - "tinyint" => columns.push(tinyint(identifier.deref(), vec![rng.gen::(); num_rows])), - "smallint" => columns.push(smallint( - identifier.deref(), - vec![rng.gen::(); num_rows], - )), - "int" => columns.push(int(identifier.deref(), vec![rng.gen::(); num_rows])), + "tinyint" => columns.push(tinyint(&*identifier, vec![rng.gen::(); num_rows])), + "smallint" => columns.push(smallint(&*identifier, vec![rng.gen::(); num_rows])), + "int" => columns.push(int(&*identifier, vec![rng.gen::(); num_rows])), "timestamptz" => columns.push(timestamptz( - identifier.deref(), + &*identifier, PoSQLTimeUnit::Second, PoSQLTimeZone::Utc, vec![rng.gen::(); num_rows], diff --git a/crates/proof-of-sql/benches/jaeger_benches.rs b/crates/proof-of-sql/benches/jaeger_benches.rs index 
04d35e7bd..f6e20bba7 100644 --- a/crates/proof-of-sql/benches/jaeger_benches.rs +++ b/crates/proof-of-sql/benches/jaeger_benches.rs @@ -22,6 +22,7 @@ use std::env; const SIZE: usize = 1_000_000; +#[allow(clippy::items_after_statements)] fn main() { init_backend(); use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; diff --git a/crates/proof-of-sql/src/base/math/permutation.rs b/crates/proof-of-sql/src/base/math/permutation.rs index ca8c6bb2c..f5466e422 100644 --- a/crates/proof-of-sql/src/base/math/permutation.rs +++ b/crates/proof-of-sql/src/base/math/permutation.rs @@ -63,13 +63,13 @@ impl Permutation { where T: Clone, { - if slice.len() != self.size() { + if slice.len() == self.size() { + Ok(self.permutation.iter().map(|&i| slice[i].clone()).collect()) + } else { Err(PermutationError::PermutationSizeMismatch { permutation_size: self.size(), slice_length: slice.len(), }) - } else { - Ok(self.permutation.iter().map(|&i| slice[i].clone()).collect()) } } } diff --git a/crates/proof-of-sql/src/base/slice_ops/batch_inverse_test.rs b/crates/proof-of-sql/src/base/slice_ops/batch_inverse_test.rs index 53283aae7..c7fc8cc72 100644 --- a/crates/proof-of-sql/src/base/slice_ops/batch_inverse_test.rs +++ b/crates/proof-of-sql/src/base/slice_ops/batch_inverse_test.rs @@ -47,10 +47,10 @@ fn we_can_pseudo_invert_arrays_of_length_bigger_than_1_with_zeros_and_non_zeros( slice_ops::batch_inversion(&mut res[..]); for (input_val, res_val) in input.iter().zip(res) { - if *input_val != Curve25519Scalar::zero() { - assert!(input_val.inv().unwrap() == res_val); - } else { + if *input_val == Curve25519Scalar::zero() { assert!(Curve25519Scalar::zero() == res_val); + } else { + assert!(input_val.inv().unwrap() == res_val); } } } @@ -78,10 +78,10 @@ fn we_can_pseudo_invert_arrays_with_nonzero_count_bigger_than_min_chunking_size_ slice_ops::batch_inversion(&mut res[..]); for (input_val, res_val) in input.iter().zip(res) { - if *input_val != Curve25519Scalar::zero() { - 
assert!(input_val.inv().unwrap() == res_val); - } else { + if *input_val == Curve25519Scalar::zero() { assert!(Curve25519Scalar::zero() == res_val); + } else { + assert!(input_val.inv().unwrap() == res_val); } } } @@ -109,10 +109,10 @@ fn we_can_pseudo_invert_arrays_with_nonzero_count_smaller_than_min_chunking_size slice_ops::batch_inversion(&mut res[..]); for (input_val, res_val) in input.iter().zip(res) { - if *input_val != Curve25519Scalar::zero() { - assert!(input_val.inv().unwrap() == res_val); - } else { + if *input_val == Curve25519Scalar::zero() { assert!(Curve25519Scalar::zero() == res_val); + } else { + assert!(input_val.inv().unwrap() == res_val); } } } diff --git a/crates/proof-of-sql/src/sql/postprocessing/group_by_postprocessing.rs b/crates/proof-of-sql/src/sql/postprocessing/group_by_postprocessing.rs index 0b0cbb9e8..1c8ecaad9 100644 --- a/crates/proof-of-sql/src/sql/postprocessing/group_by_postprocessing.rs +++ b/crates/proof-of-sql/src/sql/postprocessing/group_by_postprocessing.rs @@ -76,14 +76,14 @@ fn get_aggregate_and_remainder_expressions( Expression::Column(_) | Expression::Literal(_) | Expression::Wildcard => expr, Expression::Aggregation { op, expr } => { let key = (op, (*expr)); - if !aggregation_expr_map.contains_key(&key) { + if aggregation_expr_map.contains_key(&key) { + Expression::Column(*aggregation_expr_map.get(&key).unwrap()) + } else { let new_col_id = format!("__col_agg_{}", aggregation_expr_map.len()) .parse() .unwrap(); aggregation_expr_map.insert(key, new_col_id); Expression::Column(new_col_id) - } else { - Expression::Column(*aggregation_expr_map.get(&key).unwrap()) } } Expression::Binary { op, left, right } => { @@ -232,31 +232,28 @@ impl PostprocessingStep for GroupByPostprocessing { let selection_in = vec![true; owned_table.num_rows()]; let (sum_identifiers, sum_columns): (Vec<_>, Vec<_>) = evaluated_columns .get(&AggregationOperator::Sum) - .map(|tuple| { + .map_or((vec![], vec![]), |tuple| { tuple .iter() .map(|(id, c)| 
(*id, Column::::from_owned_column(c, &alloc))) .unzip() - }) - .unwrap_or((vec![], vec![])); + }); let (max_identifiers, max_columns): (Vec<_>, Vec<_>) = evaluated_columns .get(&AggregationOperator::Max) - .map(|tuple| { + .map_or((vec![], vec![]), |tuple| { tuple .iter() .map(|(id, c)| (*id, Column::::from_owned_column(c, &alloc))) .unzip() - }) - .unwrap_or((vec![], vec![])); + }); let (min_identifiers, min_columns): (Vec<_>, Vec<_>) = evaluated_columns .get(&AggregationOperator::Min) - .map(|tuple| { + .map_or((vec![], vec![]), |tuple| { tuple .iter() .map(|(id, c)| (*id, Column::::from_owned_column(c, &alloc))) .unzip() - }) - .unwrap_or((vec![], vec![])); + }); let aggregation_results = aggregate_columns( &alloc, &group_by_ins, diff --git a/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs index 88527ad60..63611cb59 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs @@ -74,13 +74,13 @@ impl DynProofExpr { pub fn try_new_equals(lhs: DynProofExpr, rhs: DynProofExpr) -> ConversionResult { let lhs_datatype = lhs.data_type(); let rhs_datatype = rhs.data_type(); - if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Equal) { + if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Equal) { + Ok(Self::Equals(EqualsExpr::new(Box::new(lhs), Box::new(rhs)))) + } else { Err(ConversionError::DataTypeMismatch { left_type: lhs_datatype.to_string(), right_type: rhs_datatype.to_string(), }) - } else { - Ok(Self::Equals(EqualsExpr::new(Box::new(lhs), Box::new(rhs)))) } } /// Create a new inequality expression @@ -91,21 +91,21 @@ impl DynProofExpr { ) -> ConversionResult { let lhs_datatype = lhs.data_type(); let rhs_datatype = rhs.data_type(); - if !type_check_binary_operation( + if type_check_binary_operation( &lhs_datatype, &rhs_datatype, BinaryOperator::LessThanOrEqual, ) { - 
Err(ConversionError::DataTypeMismatch { - left_type: lhs_datatype.to_string(), - right_type: rhs_datatype.to_string(), - }) - } else { Ok(Self::Inequality(InequalityExpr::new( Box::new(lhs), Box::new(rhs), is_lte, ))) + } else { + Err(ConversionError::DataTypeMismatch { + left_type: lhs_datatype.to_string(), + right_type: rhs_datatype.to_string(), + }) } } @@ -113,17 +113,17 @@ impl DynProofExpr { pub fn try_new_add(lhs: DynProofExpr, rhs: DynProofExpr) -> ConversionResult { let lhs_datatype = lhs.data_type(); let rhs_datatype = rhs.data_type(); - if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Add) { - Err(ConversionError::DataTypeMismatch { - left_type: lhs_datatype.to_string(), - right_type: rhs_datatype.to_string(), - }) - } else { + if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Add) { Ok(Self::AddSubtract(AddSubtractExpr::new( Box::new(lhs), Box::new(rhs), false, ))) + } else { + Err(ConversionError::DataTypeMismatch { + left_type: lhs_datatype.to_string(), + right_type: rhs_datatype.to_string(), + }) } } @@ -131,17 +131,17 @@ impl DynProofExpr { pub fn try_new_subtract(lhs: DynProofExpr, rhs: DynProofExpr) -> ConversionResult { let lhs_datatype = lhs.data_type(); let rhs_datatype = rhs.data_type(); - if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Subtract) { - Err(ConversionError::DataTypeMismatch { - left_type: lhs_datatype.to_string(), - right_type: rhs_datatype.to_string(), - }) - } else { + if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Subtract) { Ok(Self::AddSubtract(AddSubtractExpr::new( Box::new(lhs), Box::new(rhs), true, ))) + } else { + Err(ConversionError::DataTypeMismatch { + left_type: lhs_datatype.to_string(), + right_type: rhs_datatype.to_string(), + }) } } @@ -149,16 +149,16 @@ impl DynProofExpr { pub fn try_new_multiply(lhs: DynProofExpr, rhs: DynProofExpr) -> ConversionResult { let lhs_datatype = lhs.data_type(); let 
rhs_datatype = rhs.data_type(); - if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Multiply) { - Err(ConversionError::DataTypeMismatch { - left_type: lhs_datatype.to_string(), - right_type: rhs_datatype.to_string(), - }) - } else { + if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Multiply) { Ok(Self::Multiply(MultiplyExpr::new( Box::new(lhs), Box::new(rhs), ))) + } else { + Err(ConversionError::DataTypeMismatch { + left_type: lhs_datatype.to_string(), + right_type: rhs_datatype.to_string(), + }) } } From cbf50784cb4d512aa0ea1e597e420b367872c41a Mon Sep 17 00:00:00 2001 From: Abinand P Date: Sat, 12 Oct 2024 00:32:16 +0530 Subject: [PATCH 21/29] refactor: modified the CI to run the examples with the `blitzer` feature disabled and updated the `ReadMe.md` (#259) # Rationale Our example code leverages the `InnerProductProof` commitment scheme. While this is still valid, it is more difficult as an entry point to the code because it requires Linux (and GPU by default). # What changes are included in this PR? Updated the documents and disabled the `blitzer` feature in the CI # Are these changes tested? yes --------- Signed-off-by: Abinand P --- .github/workflows/lint-and-test.yml | 12 +++++++++--- crates/proof-of-sql/README.md | 7 ++++++- crates/proof-of-sql/examples/hello_world/README.md | 10 ++++++++-- crates/proof-of-sql/examples/posql_db/README.md | 6 ++++++ crates/proof-of-sql/examples/posql_db/run_example.sh | 8 ++++---- 5 files changed, 33 insertions(+), 10 deletions(-) diff --git a/.github/workflows/lint-and-test.yml b/.github/workflows/lint-and-test.yml index d3a21d6ce..e5c92b1b3 100644 --- a/.github/workflows/lint-and-test.yml +++ b/.github/workflows/lint-and-test.yml @@ -104,10 +104,16 @@ jobs: run: cargo test -p proof-of-sql --no-run --no-default-features --features="std" - name: Run cargo test (proof primitives - Dory) (std feature only - i.e. 
not using blitzar) run: cargo test proof_primitive::dory::dory_compute_commitments_test --no-default-features --features="std" - - name: Run hello_world example - run: cargo run --example hello_world --features="blitzar test" - - name: Run posql_db example + - name: Run hello_world example (With Blitzar) + run: cargo run --example hello_world --features="test" + - name: Run hello_world example (Without Blitzar and With Rayon) + run: cargo run --example hello_world --no-default-features --features="rayon test" + - name: Run hello_world example (Without Blitzar and Without Rayon) + run: cargo run --example hello_world --no-default-features --features="test" + - name: Run posql_db example (With Blitzar) run: bash crates/proof-of-sql/examples/posql_db/run_example.sh + - name: Run posql_db example (Without Blitzar) + run: bash crates/proof-of-sql/examples/posql_db/run_example.sh --no-default-features --features="rayon" clippy: name: Clippy diff --git a/crates/proof-of-sql/README.md b/crates/proof-of-sql/README.md index be1254a6e..21fc50cb3 100644 --- a/crates/proof-of-sql/README.md +++ b/crates/proof-of-sql/README.md @@ -73,8 +73,13 @@ The "Hello World" example demonstrates generating and verifying a proof of the q #### Run ```bash -cargo run --example hello_world +cargo run --example hello_world ``` +> [!NOTE] +> To run this example without the `blitzar` (i.e CPU only) feature: +> ```bash +> cargo run --example hello_world --no-default-features --features="rayon test" +> ``` #### Output diff --git a/crates/proof-of-sql/examples/hello_world/README.md b/crates/proof-of-sql/examples/hello_world/README.md index 038f64334..dacbe439e 100644 --- a/crates/proof-of-sql/examples/hello_world/README.md +++ b/crates/proof-of-sql/examples/hello_world/README.md @@ -12,9 +12,15 @@ This example demonstrates generating and verifying a proof of the query `SELECT #### Run ```bash -cargo run --example hello_world +cargo run --example hello_world ``` +> [!NOTE] +> To run this example 
without the `blitzar` (i.e CPU only) feature: +> ```bash +> cargo run --example hello_world --no-default-features --features="test rayon" +> ``` + #### Output ``` @@ -25,4 +31,4 @@ Generating Proof... 467.45371ms Verifying Proof... 7.106864ms Valid proof! Query result: OwnedTable { table: {Identifier { name: "b" }: VarChar(["hello", "world"])} } -``` \ No newline at end of file +``` diff --git a/crates/proof-of-sql/examples/posql_db/README.md b/crates/proof-of-sql/examples/posql_db/README.md index d64b81071..6774b5eea 100644 --- a/crates/proof-of-sql/examples/posql_db/README.md +++ b/crates/proof-of-sql/examples/posql_db/README.md @@ -5,6 +5,12 @@ Example demonstrating an implementation of a simple csv-backed database with Pro ## Install Run `cargo install --example posql_db --path crates/proof-of-sql` to install the example. +> [!NOTE] +> To run this example without the `blitzar` (i.e CPU only )feature +> ```bash +> cargo install --example posql_db --path crates/proof-of-sql --no-default-features --features="rayon" +> ``` + ## Quick Start Exmaple Run the following ```bash diff --git a/crates/proof-of-sql/examples/posql_db/run_example.sh b/crates/proof-of-sql/examples/posql_db/run_example.sh index 133aea220..f77dd8bd8 100644 --- a/crates/proof-of-sql/examples/posql_db/run_example.sh +++ b/crates/proof-of-sql/examples/posql_db/run_example.sh @@ -1,5 +1,5 @@ cd crates/proof-of-sql/examples/posql_db -cargo run --features="arrow " --example posql_db create -t sxt.table -c a,b -d BIGINT,VARCHAR -cargo run --features="arrow " --example posql_db append -t sxt.table -f hello_world.csv -cargo run --features="arrow " --example posql_db prove -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof -cargo run --features="arrow " --example posql_db verify -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof \ No newline at end of file +cargo run --features="arrow" "$@" --example posql_db create -t sxt.table -c a,b -d BIGINT,VARCHAR +cargo run --features="arrow" "$@" 
--example posql_db append -t sxt.table -f hello_world.csv +cargo run --features="arrow" "$@" --example posql_db prove -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof +cargo run --features="arrow" "$@" --example posql_db verify -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof From 4211deaf1a2c6791f1d21ecdb9c89b7cf749d2fc Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Fri, 11 Oct 2024 21:30:52 -0400 Subject: [PATCH 22/29] test: add tests for dynamic Dory commitment computation (#242) # Rationale for this change A CPU implementation of the dynamic Dory commitment computation was implemented in a previous commit: 543573f7a2643ee05fb1ceb8aeaad8932b2b2905. This PR introduces tests for the dynamic Dory commitment computation by reworking the tests in the `dory_compute_commitments_test` module to match the updated dynamic Dory structure. # What changes are included in this PR? - Tests are added for the dynamic Dory commitment computation. - Dynamic Dory commitment computation tests are added to the CI workflow. # Are these changes tested? Yes --------- Co-authored-by: Jay White --- .github/workflows/lint-and-test.yml | 4 +- .../dynamic_dory_compute_commitments_test.rs | 513 ++++++++++++++++++ .../src/proof_primitive/dory/mod.rs | 2 + 3 files changed, 518 insertions(+), 1 deletion(-) create mode 100644 crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_compute_commitments_test.rs diff --git a/.github/workflows/lint-and-test.yml b/.github/workflows/lint-and-test.yml index e5c92b1b3..ad8becb68 100644 --- a/.github/workflows/lint-and-test.yml +++ b/.github/workflows/lint-and-test.yml @@ -103,7 +103,9 @@ jobs: - name: Dry run cargo test (proof-of-sql) (std feature only) run: cargo test -p proof-of-sql --no-run --no-default-features --features="std" - name: Run cargo test (proof primitives - Dory) (std feature only - i.e. 
not using blitzar) - run: cargo test proof_primitive::dory::dory_compute_commitments_test --no-default-features --features="std" + run: | + cargo test proof_primitive::dory::dory_compute_commitments_test --no-default-features --features="std" && \ + cargo test proof_primitive::dory::dynamic_dory_compute_commitments_test --no-default-features --features="std" - name: Run hello_world example (With Blitzar) run: cargo run --example hello_world --features="test" - name: Run hello_world example (Without Blitzar and With Rayon) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_compute_commitments_test.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_compute_commitments_test.rs new file mode 100644 index 000000000..b6426efba --- /dev/null +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_compute_commitments_test.rs @@ -0,0 +1,513 @@ +use crate::{ + base::{commitment::CommittableColumn, math::decimal::Precision}, + proof_primitive::dory::{ + compute_dynamic_dory_commitments, test_rng, ProverSetup, PublicParameters, F, GT, + }, +}; +use ark_ec::pairing::Pairing; +use num_traits::Zero; +use proof_of_sql_parser::posql_time::{PoSQLTimeUnit, PoSQLTimeZone}; + +#[test] +fn we_can_compute_a_dynamic_dory_commitment_with_unsigned_bigint_values() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[CommittableColumn::BigInt(&[ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + ])], + 0, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(0) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(1) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(2) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(3) + + Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(4) + + 
Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(5) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(6) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(7) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(8) + + Pairing::pairing(Gamma_1[1], Gamma_2[4]) * F::from(9) + + Pairing::pairing(Gamma_1[2], Gamma_2[4]) * F::from(10) + + Pairing::pairing(Gamma_1[3], Gamma_2[4]) * F::from(11) + + Pairing::pairing(Gamma_1[0], Gamma_2[5]) * F::from(12) + + Pairing::pairing(Gamma_1[1], Gamma_2[5]) * F::from(13) + + Pairing::pairing(Gamma_1[2], Gamma_2[5]) * F::from(14) + + Pairing::pairing(Gamma_1[3], Gamma_2[5]) * F::from(15) + + Pairing::pairing(Gamma_1[0], Gamma_2[6]) * F::from(16) + + Pairing::pairing(Gamma_1[1], Gamma_2[6]) * F::from(17) + + Pairing::pairing(Gamma_1[2], Gamma_2[6]) * F::from(18); + assert_eq!(res[0].0, expected); +} + +#[test] +fn we_can_compute_a_dynamic_dory_commitment_with_unsigned_bigint_values_and_an_offset() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[CommittableColumn::BigInt(&[ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + ])], + 5, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(0) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(1) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(2) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(3) + + Pairing::pairing(Gamma_1[1], Gamma_2[4]) * F::from(4) + + Pairing::pairing(Gamma_1[2], Gamma_2[4]) * F::from(5) + + Pairing::pairing(Gamma_1[3], Gamma_2[4]) * F::from(6) + + Pairing::pairing(Gamma_1[0], Gamma_2[5]) * F::from(7) + + Pairing::pairing(Gamma_1[1], Gamma_2[5]) * F::from(8) + + Pairing::pairing(Gamma_1[2], Gamma_2[5]) * F::from(9) + + Pairing::pairing(Gamma_1[3], Gamma_2[5]) * 
F::from(10) + + Pairing::pairing(Gamma_1[0], Gamma_2[6]) * F::from(11) + + Pairing::pairing(Gamma_1[1], Gamma_2[6]) * F::from(12) + + Pairing::pairing(Gamma_1[2], Gamma_2[6]) * F::from(13) + + Pairing::pairing(Gamma_1[3], Gamma_2[6]) * F::from(14) + + Pairing::pairing(Gamma_1[4], Gamma_2[6]) * F::from(15) + + Pairing::pairing(Gamma_1[5], Gamma_2[6]) * F::from(16) + + Pairing::pairing(Gamma_1[6], Gamma_2[6]) * F::from(17) + + Pairing::pairing(Gamma_1[7], Gamma_2[6]) * F::from(18); + assert_eq!(res[0].0, expected); +} + +#[test] +fn we_can_compute_a_dynamic_dory_commitment_with_signed_bigint_values_and_an_offset() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments(&[CommittableColumn::BigInt(&[-2, -3])], 2, &setup); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(-2) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(-3); + assert_eq!(res[0].0, expected); +} + +#[test] +fn we_can_compute_three_dynamic_dory_commitments_with_unsigned_bigint_and_offset() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[ + CommittableColumn::BigInt(&[ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + ]), + CommittableColumn::BigInt(&[ + 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, + ]), + CommittableColumn::BigInt(&[ + 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + ]), + ], + 5, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(0) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(1) + + Pairing::pairing(Gamma_1[3], 
Gamma_2[3]) * F::from(2) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(3) + + Pairing::pairing(Gamma_1[1], Gamma_2[4]) * F::from(4) + + Pairing::pairing(Gamma_1[2], Gamma_2[4]) * F::from(5) + + Pairing::pairing(Gamma_1[3], Gamma_2[4]) * F::from(6) + + Pairing::pairing(Gamma_1[0], Gamma_2[5]) * F::from(7) + + Pairing::pairing(Gamma_1[1], Gamma_2[5]) * F::from(8) + + Pairing::pairing(Gamma_1[2], Gamma_2[5]) * F::from(9) + + Pairing::pairing(Gamma_1[3], Gamma_2[5]) * F::from(10) + + Pairing::pairing(Gamma_1[0], Gamma_2[6]) * F::from(11) + + Pairing::pairing(Gamma_1[1], Gamma_2[6]) * F::from(12) + + Pairing::pairing(Gamma_1[2], Gamma_2[6]) * F::from(13) + + Pairing::pairing(Gamma_1[3], Gamma_2[6]) * F::from(14) + + Pairing::pairing(Gamma_1[4], Gamma_2[6]) * F::from(15) + + Pairing::pairing(Gamma_1[5], Gamma_2[6]) * F::from(16) + + Pairing::pairing(Gamma_1[6], Gamma_2[6]) * F::from(17) + + Pairing::pairing(Gamma_1[7], Gamma_2[6]) * F::from(18); + assert_eq!(res[0].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(19) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(20) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(21) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(22) + + Pairing::pairing(Gamma_1[1], Gamma_2[4]) * F::from(23) + + Pairing::pairing(Gamma_1[2], Gamma_2[4]) * F::from(24) + + Pairing::pairing(Gamma_1[3], Gamma_2[4]) * F::from(25) + + Pairing::pairing(Gamma_1[0], Gamma_2[5]) * F::from(26) + + Pairing::pairing(Gamma_1[1], Gamma_2[5]) * F::from(27) + + Pairing::pairing(Gamma_1[2], Gamma_2[5]) * F::from(28) + + Pairing::pairing(Gamma_1[3], Gamma_2[5]) * F::from(29) + + Pairing::pairing(Gamma_1[0], Gamma_2[6]) * F::from(30) + + Pairing::pairing(Gamma_1[1], Gamma_2[6]) * F::from(31) + + Pairing::pairing(Gamma_1[2], Gamma_2[6]) * F::from(32) + + Pairing::pairing(Gamma_1[3], Gamma_2[6]) * F::from(33) + + Pairing::pairing(Gamma_1[4], Gamma_2[6]) * F::from(34) + + Pairing::pairing(Gamma_1[5], 
Gamma_2[6]) * F::from(35) + + Pairing::pairing(Gamma_1[6], Gamma_2[6]) * F::from(36) + + Pairing::pairing(Gamma_1[7], Gamma_2[6]) * F::from(37); + assert_eq!(res[1].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(38) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(39) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(40) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(41) + + Pairing::pairing(Gamma_1[1], Gamma_2[4]) * F::from(42) + + Pairing::pairing(Gamma_1[2], Gamma_2[4]) * F::from(43) + + Pairing::pairing(Gamma_1[3], Gamma_2[4]) * F::from(44) + + Pairing::pairing(Gamma_1[0], Gamma_2[5]) * F::from(45) + + Pairing::pairing(Gamma_1[1], Gamma_2[5]) * F::from(46) + + Pairing::pairing(Gamma_1[2], Gamma_2[5]) * F::from(47) + + Pairing::pairing(Gamma_1[3], Gamma_2[5]) * F::from(48) + + Pairing::pairing(Gamma_1[0], Gamma_2[6]) * F::from(49) + + Pairing::pairing(Gamma_1[1], Gamma_2[6]) * F::from(50) + + Pairing::pairing(Gamma_1[2], Gamma_2[6]) * F::from(51) + + Pairing::pairing(Gamma_1[3], Gamma_2[6]) * F::from(52) + + Pairing::pairing(Gamma_1[4], Gamma_2[6]) * F::from(53) + + Pairing::pairing(Gamma_1[5], Gamma_2[6]) * F::from(54) + + Pairing::pairing(Gamma_1[6], Gamma_2[6]) * F::from(55) + + Pairing::pairing(Gamma_1[7], Gamma_2[6]) * F::from(56); + assert_eq!(res[2].0, expected); +} + +#[test] +fn we_can_compute_an_empty_dynamic_dory_commitment() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments(&[CommittableColumn::BigInt(&[0; 0])], 0, &setup); + assert_eq!(res[0].0, GT::zero()); + let res = compute_dynamic_dory_commitments(&[CommittableColumn::BigInt(&[0; 0])], 5, &setup); + assert_eq!(res[0].0, GT::zero()); + let res = compute_dynamic_dory_commitments(&[CommittableColumn::BigInt(&[0; 0])], 20, &setup); + assert_eq!(res[0].0, GT::zero()); +} + +#[test] +fn 
we_can_compute_a_dynamic_dory_commitment_with_mixed_committable_columns() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[ + CommittableColumn::TinyInt(&[0, 1]), + CommittableColumn::BigInt(&[2, 3]), + CommittableColumn::Int(&[4, 5, 10]), + CommittableColumn::SmallInt(&[6, 7]), + CommittableColumn::Int128(&[8, 9]), + CommittableColumn::Boolean(&[true, true]), + CommittableColumn::Decimal75( + Precision::new(1).unwrap(), + 0, + vec![[10, 0, 0, 0], [11, 0, 0, 0], [12, 0, 0, 0], [13, 0, 0, 0]], + ), + CommittableColumn::Scalar(vec![[14, 0, 0, 0], [15, 0, 0, 0]]), + CommittableColumn::VarChar(vec![[16, 0, 0, 0]]), + CommittableColumn::TimestampTZ( + PoSQLTimeUnit::Second, + PoSQLTimeZone::Utc, + &[17, 18, 19, 20], + ), + ], + 0, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(0) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(1); + assert_eq!(res[0].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(2) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(3); + assert_eq!(res[1].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(4) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(5) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(10); + assert_eq!(res[2].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(6) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(7); + assert_eq!(res[3].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(8) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(9); + assert_eq!(res[4].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(true) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) 
* F::from(true); + assert_eq!(res[5].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(10) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(11) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(12) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(13); + assert_eq!(res[6].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(14) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(15); + assert_eq!(res[7].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(16); + assert_eq!(res[8].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(17) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(18) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(19) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(20); + assert_eq!(res[9].0, expected); +} + +#[test] +fn we_can_compute_a_dynamic_dory_commitment_with_mixed_committable_columns_with_an_offset() { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[ + CommittableColumn::TinyInt(&[0, 1]), + CommittableColumn::BigInt(&[2, 3]), + CommittableColumn::Int(&[4, 5, 10]), + CommittableColumn::SmallInt(&[6, 7]), + CommittableColumn::Int128(&[8, 9]), + CommittableColumn::Boolean(&[true, true]), + CommittableColumn::Decimal75( + Precision::new(1).unwrap(), + 0, + vec![[10, 0, 0, 0], [11, 0, 0, 0], [12, 0, 0, 0], [13, 0, 0, 0]], + ), + CommittableColumn::Scalar(vec![[14, 0, 0, 0], [15, 0, 0, 0]]), + CommittableColumn::VarChar(vec![[16, 0, 0, 0]]), + CommittableColumn::TimestampTZ( + PoSQLTimeUnit::Second, + PoSQLTimeZone::Utc, + &[17, 18, 19, 20], + ), + ], + 2, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(0) + + 
Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(1); + assert_eq!(res[0].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(2) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(3); + assert_eq!(res[1].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(4) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(5) + + Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(10); + assert_eq!(res[2].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(6) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(7); + assert_eq!(res[3].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(8) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(9); + assert_eq!(res[4].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(true) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(true); + assert_eq!(res[5].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(10) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(11) + + Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(12) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(13); + assert_eq!(res[6].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(14) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(15); + assert_eq!(res[7].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(16); + assert_eq!(res[8].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(17) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(18) + + Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(19) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(20); + assert_eq!(res[9].0, expected); +} + +#[test] +fn we_can_compute_a_dynamic_dory_commitment_with_mixed_committable_columns_with_signed_values() { + let 
public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[ + CommittableColumn::TinyInt(&[-2, -1, 0, 1, 2]), + CommittableColumn::BigInt(&[-3, -2, 2, 3]), + CommittableColumn::Int(&[-6, -5, -4, 4, 5, 6]), + CommittableColumn::SmallInt(&[-7, -6, 6, 7]), + CommittableColumn::Int128(&[-9, -8, 8, 9]), + CommittableColumn::Boolean(&[true, true]), + CommittableColumn::Decimal75( + Precision::new(1).unwrap(), + 0, + vec![[10, 0, 0, 0], [11, 0, 0, 0], [12, 0, 0, 0], [13, 0, 0, 0]], + ), + CommittableColumn::Scalar(vec![[14, 0, 0, 0], [15, 0, 0, 0]]), + CommittableColumn::VarChar(vec![[16, 0, 0, 0]]), + CommittableColumn::TimestampTZ( + PoSQLTimeUnit::Second, + PoSQLTimeZone::Utc, + &[-18, -17, 17, 18], + ), + ], + 0, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(-2) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(-1) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(0) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(1) + + Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(2); + assert_eq!(res[0].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(-3) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(-2) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(2) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(3); + assert_eq!(res[1].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(-6) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(-5) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(-4) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(4) + + Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(5) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(6); + assert_eq!(res[2].0, expected); + + let expected: GT = 
Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(-7) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(-6) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(6) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(7); + assert_eq!(res[3].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(-9) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(-8) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(8) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(9); + assert_eq!(res[4].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(true) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(true); + assert_eq!(res[5].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(10) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(11) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(12) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(13); + assert_eq!(res[6].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(14) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(15); + assert_eq!(res[7].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(16); + assert_eq!(res[8].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[0]) * F::from(-18) + + Pairing::pairing(Gamma_1[1], Gamma_2[1]) * F::from(-17) + + Pairing::pairing(Gamma_1[0], Gamma_2[2]) * F::from(17) + + Pairing::pairing(Gamma_1[1], Gamma_2[2]) * F::from(18); + assert_eq!(res[9].0, expected); +} + +#[test] +fn we_can_compute_a_dynamic_dory_commitment_with_mixed_committable_columns_with_an_offset_and_signed_values( +) { + let public_parameters = PublicParameters::test_rand(5, &mut test_rng()); + let setup = ProverSetup::from(&public_parameters); + let res = compute_dynamic_dory_commitments( + &[ + CommittableColumn::TinyInt(&[-2, -1, 0, 1, 2]), + CommittableColumn::BigInt(&[-3, -2, 2, 3]), 
+ CommittableColumn::Int(&[-6, -5, -4, 4, 5, 6]), + CommittableColumn::SmallInt(&[-7, -6, 6, 7]), + CommittableColumn::Int128(&[-9, -8, 8, 9]), + CommittableColumn::Boolean(&[true, true]), + CommittableColumn::Decimal75( + Precision::new(1).unwrap(), + 0, + vec![[10, 0, 0, 0], [11, 0, 0, 0], [12, 0, 0, 0], [13, 0, 0, 0]], + ), + CommittableColumn::Scalar(vec![[14, 0, 0, 0], [15, 0, 0, 0]]), + CommittableColumn::VarChar(vec![[16, 0, 0, 0]]), + CommittableColumn::TimestampTZ( + PoSQLTimeUnit::Second, + PoSQLTimeZone::Utc, + &[-18, -17, 17, 18], + ), + ], + 4, + &setup, + ); + let Gamma_1 = public_parameters.Gamma_1; + let Gamma_2 = public_parameters.Gamma_2; + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(-2) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(-1) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(0) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(1) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(2); + assert_eq!(res[0].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(-3) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(-2) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(2) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(3); + assert_eq!(res[1].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(-6) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(-5) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(-4) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(4) + + Pairing::pairing(Gamma_1[0], Gamma_2[4]) * F::from(5) + + Pairing::pairing(Gamma_1[1], Gamma_2[4]) * F::from(6); + assert_eq!(res[2].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(-7) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(-6) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(6) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(7); + assert_eq!(res[3].0, expected); + + let 
expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(-9) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(-8) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(8) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(9); + assert_eq!(res[4].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(true) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(true); + assert_eq!(res[5].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(10) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(11) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(12) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(13); + assert_eq!(res[6].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(14) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(15); + assert_eq!(res[7].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(16); + assert_eq!(res[8].0, expected); + + let expected: GT = Pairing::pairing(Gamma_1[0], Gamma_2[3]) * F::from(-18) + + Pairing::pairing(Gamma_1[1], Gamma_2[3]) * F::from(-17) + + Pairing::pairing(Gamma_1[2], Gamma_2[3]) * F::from(17) + + Pairing::pairing(Gamma_1[3], Gamma_2[3]) * F::from(18); + assert_eq!(res[9].0, expected); +} diff --git a/crates/proof-of-sql/src/proof_primitive/dory/mod.rs b/crates/proof-of-sql/src/proof_primitive/dory/mod.rs index 00cdbd64a..08bbeb6c8 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/mod.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/mod.rs @@ -151,6 +151,8 @@ mod dynamic_dory_structure; use dynamic_dory_commitment_helper_cpu::compute_dynamic_dory_commitments; mod dynamic_dory_commitment; mod dynamic_dory_commitment_evaluation_proof; +#[cfg(test)] +mod dynamic_dory_compute_commitments_test; pub use dynamic_dory_commitment::DynamicDoryCommitment; #[cfg(test)] mod dynamic_dory_commitment_evaluation_proof_test; From 
6768fe40f293d82725c626d6486b6ca309ac9aa6 Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:20:06 -0400 Subject: [PATCH 23/29] chore: copy dynamic_dory_commitment_helper_cpu to create initial dynamic_dory_commitment_helper_gpu module --- .../dynamic_dory_commitment_helper_gpu.rs | 82 +++++++++++++++++++ .../src/proof_primitive/dory/mod.rs | 6 ++ 2 files changed, 88 insertions(+) create mode 100644 crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs new file mode 100644 index 000000000..8ed2ddbb5 --- /dev/null +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -0,0 +1,82 @@ +use super::{ + dynamic_dory_structure::row_and_column_from_index, pairings, DoryScalar, DynamicDoryCommitment, + G1Affine, G1Projective, ProverSetup, GT, +}; +use crate::base::commitment::CommittableColumn; +use alloc::{vec, vec::Vec}; +use num_traits::Zero; + +#[tracing::instrument(name = "compute_dory_commitment_impl (cpu)", level = "debug", skip_all)] +/// # Panics +/// +/// Will panic if: +/// - `setup.Gamma_1.last()` returns `None`, indicating that `Gamma_1` is empty. +/// - `setup.Gamma_2.last()` returns `None`, indicating that `Gamma_2` is empty. +/// - The indexing for `Gamma_2` with `first_row..=last_row` goes out of bounds. 
+fn compute_dory_commitment_impl<'a, T>( + column: &'a [T], + offset: usize, + setup: &ProverSetup, +) -> DynamicDoryCommitment +where + &'a T: Into, + T: Sync, +{ + let Gamma_1 = setup.Gamma_1.last().unwrap(); + let Gamma_2 = setup.Gamma_2.last().unwrap(); + let (first_row, _) = row_and_column_from_index(offset); + let (last_row, _) = row_and_column_from_index(offset + column.len() - 1); + let row_commits = column.iter().enumerate().fold( + vec![G1Projective::from(G1Affine::identity()); last_row - first_row + 1], + |mut row_commits, (i, v)| { + let (row, col) = row_and_column_from_index(i + offset); + row_commits[row - first_row] += Gamma_1[col] * v.into().0; + row_commits + }, + ); + DynamicDoryCommitment(pairings::multi_pairing( + row_commits, + &Gamma_2[first_row..=last_row], + )) +} + +fn compute_dory_commitment( + committable_column: &CommittableColumn, + offset: usize, + setup: &ProverSetup, +) -> DynamicDoryCommitment { + match committable_column { + CommittableColumn::Scalar(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::TinyInt(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::SmallInt(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::Int(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::BigInt(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::Int128(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::VarChar(column) | CommittableColumn::Decimal75(_, _, column) => { + compute_dory_commitment_impl(column, offset, setup) + } + CommittableColumn::Boolean(column) => compute_dory_commitment_impl(column, offset, setup), + CommittableColumn::TimestampTZ(_, _, column) => { + compute_dory_commitment_impl(column, offset, setup) + } + CommittableColumn::RangeCheckWord(column) => { + compute_dory_commitment_impl(column, offset, setup) + } + } +} + +pub(super) 
fn compute_dynamic_dory_commitments( + committable_columns: &[CommittableColumn], + offset: usize, + setup: &ProverSetup, +) -> Vec { + committable_columns + .iter() + .map(|column| { + column + .is_empty() + .then(|| DynamicDoryCommitment(GT::zero())) + .unwrap_or_else(|| compute_dory_commitment(column, offset, setup)) + }) + .collect() +} diff --git a/crates/proof-of-sql/src/proof_primitive/dory/mod.rs b/crates/proof-of-sql/src/proof_primitive/dory/mod.rs index 08bbeb6c8..3c4070bac 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/mod.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/mod.rs @@ -144,11 +144,17 @@ mod pairings; mod transpose; mod dynamic_build_vmv_state; +#[cfg(not(feature = "blitzar"))] mod dynamic_dory_commitment_helper_cpu; +#[cfg(feature = "blitzar")] +mod dynamic_dory_commitment_helper_gpu; mod dynamic_dory_helper; mod dynamic_dory_standard_basis_helper; mod dynamic_dory_structure; +#[cfg(not(feature = "blitzar"))] use dynamic_dory_commitment_helper_cpu::compute_dynamic_dory_commitments; +#[cfg(feature = "blitzar")] +use dynamic_dory_commitment_helper_gpu::compute_dynamic_dory_commitments; mod dynamic_dory_commitment; mod dynamic_dory_commitment_evaluation_proof; #[cfg(test)] From 3b8f71ee5e297691f7a3aeb3e61ec71dcc1e307e Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:21:49 -0400 Subject: [PATCH 24/29] feat: expose Blitzar's vlen msm function in Dory setup --- .../proof-of-sql/src/proof_primitive/dory/setup.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/setup.rs b/crates/proof-of-sql/src/proof_primitive/dory/setup.rs index b06199e15..4283fa7d5 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/setup.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/setup.rs @@ -88,6 +88,19 @@ impl<'a> ProverSetup<'a> { self.blitzar_handle .packed_msm(res, output_bit_table, scalars); } + + #[cfg(feature = "blitzar")] + #[tracing::instrument(name = 
"ProverSetup::blitzar_vlen_msm", level = "debug", skip_all)] + pub(super) fn blitzar_vlen_msm( + &self, + res: &mut [blitzar::compute::ElementP2], + output_bit_table: &[u32], + output_lengths: &[u32], + scalars: &[u8], + ) { + self.blitzar_handle + .vlen_msm(res, output_bit_table, output_lengths, scalars); + } } impl<'a> From<&'a PublicParameters> for ProverSetup<'a> { From e531c8b70725809a0ef11a20aba9961f68259924 Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:26:23 -0400 Subject: [PATCH 25/29] refactor: remove code duplicated from dynamic_dory_commitment_helper_cpu module --- .../dynamic_dory_commitment_helper_gpu.rs | 81 ++----------------- 1 file changed, 6 insertions(+), 75 deletions(-) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs index 8ed2ddbb5..f79ba939f 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -1,82 +1,13 @@ use super::{ - dynamic_dory_structure::row_and_column_from_index, pairings, DoryScalar, DynamicDoryCommitment, - G1Affine, G1Projective, ProverSetup, GT, + DynamicDoryCommitment, + ProverSetup, }; use crate::base::commitment::CommittableColumn; -use alloc::{vec, vec::Vec}; -use num_traits::Zero; - -#[tracing::instrument(name = "compute_dory_commitment_impl (cpu)", level = "debug", skip_all)] -/// # Panics -/// -/// Will panic if: -/// - `setup.Gamma_1.last()` returns `None`, indicating that `Gamma_1` is empty. -/// - `setup.Gamma_2.last()` returns `None`, indicating that `Gamma_2` is empty. -/// - The indexing for `Gamma_2` with `first_row..=last_row` goes out of bounds. 
-fn compute_dory_commitment_impl<'a, T>( - column: &'a [T], - offset: usize, - setup: &ProverSetup, -) -> DynamicDoryCommitment -where - &'a T: Into, - T: Sync, -{ - let Gamma_1 = setup.Gamma_1.last().unwrap(); - let Gamma_2 = setup.Gamma_2.last().unwrap(); - let (first_row, _) = row_and_column_from_index(offset); - let (last_row, _) = row_and_column_from_index(offset + column.len() - 1); - let row_commits = column.iter().enumerate().fold( - vec![G1Projective::from(G1Affine::identity()); last_row - first_row + 1], - |mut row_commits, (i, v)| { - let (row, col) = row_and_column_from_index(i + offset); - row_commits[row - first_row] += Gamma_1[col] * v.into().0; - row_commits - }, - ); - DynamicDoryCommitment(pairings::multi_pairing( - row_commits, - &Gamma_2[first_row..=last_row], - )) -} - -fn compute_dory_commitment( - committable_column: &CommittableColumn, - offset: usize, - setup: &ProverSetup, -) -> DynamicDoryCommitment { - match committable_column { - CommittableColumn::Scalar(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::TinyInt(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::SmallInt(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::Int(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::BigInt(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::Int128(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::VarChar(column) | CommittableColumn::Decimal75(_, _, column) => { - compute_dory_commitment_impl(column, offset, setup) - } - CommittableColumn::Boolean(column) => compute_dory_commitment_impl(column, offset, setup), - CommittableColumn::TimestampTZ(_, _, column) => { - compute_dory_commitment_impl(column, offset, setup) - } - CommittableColumn::RangeCheckWord(column) => { - compute_dory_commitment_impl(column, offset, setup) - } - } -} pub(super) fn 
compute_dynamic_dory_commitments( - committable_columns: &[CommittableColumn], - offset: usize, - setup: &ProverSetup, + _committable_columns: &[CommittableColumn], + _offset: usize, + _setup: &ProverSetup, ) -> Vec { - committable_columns - .iter() - .map(|column| { - column - .is_empty() - .then(|| DynamicDoryCommitment(GT::zero())) - .unwrap_or_else(|| compute_dory_commitment(column, offset, setup)) - }) - .collect() + todo!() } From 960d23f2416d66dd306f5c83ef3a66c7153afeaf Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:30:40 -0400 Subject: [PATCH 26/29] feat: the dynamic Dory GPU implementation should be able to handle signed commitments --- .../dynamic_dory_commitment_helper_gpu.rs | 51 +++++++++++++++++-- .../src/proof_primitive/dory/pack_scalars.rs | 2 +- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs index f79ba939f..a500da055 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -1,8 +1,49 @@ -use super::{ - DynamicDoryCommitment, - ProverSetup, -}; -use crate::base::commitment::CommittableColumn; +use super::{DynamicDoryCommitment, G1Affine, ProverSetup}; +use crate::{base::commitment::CommittableColumn, proof_primitive::dory::pack_scalars::min_as_f}; +use ark_ec::CurveGroup; +use ark_std::ops::Mul; + +/// Modifies the sub commits by adding the minimum commitment of the column type to the signed sub commits. +/// +/// # Arguments +/// +/// * `all_sub_commits` - A reference to the sub commits. +/// * `committable_columns` - A reference to the committable columns. +/// +/// # Returns +/// +/// A vector containing the modified sub commits to be used by the dynamic Dory commitment computation. 
+#[tracing::instrument(name = "signed_commits", level = "debug", skip_all)] +fn signed_commits( + all_sub_commits: &Vec, + committable_columns: &[CommittableColumn], +) -> Vec { + let mut unsigned_sub_commits: Vec = Vec::new(); + let mut min_sub_commits: Vec = Vec::new(); + let mut counter = 0; + + // Every sub_commit has a corresponding offset sub_commit committable_columns.len() away. + // The commits and respective ones commits are interleaved in the all_sub_commits vector. + for commit in all_sub_commits { + if counter < committable_columns.len() { + unsigned_sub_commits.push(*commit); + } else { + let min = + min_as_f(committable_columns[counter - committable_columns.len()].column_type()); + min_sub_commits.push(commit.mul(min).into_affine()); + } + counter += 1; + if counter == 2 * committable_columns.len() { + counter = 0; + } + } + + unsigned_sub_commits + .into_iter() + .zip(min_sub_commits.into_iter()) + .map(|(unsigned, min)| (unsigned + min).into()) + .collect() +} pub(super) fn compute_dynamic_dory_commitments( _committable_columns: &[CommittableColumn], diff --git a/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs b/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs index e77410cb4..8797a315b 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/pack_scalars.rs @@ -55,7 +55,7 @@ fn output_bit_table( /// # Arguments /// /// * `column_type` - The type of a committable column. 
-const fn min_as_f(column_type: ColumnType) -> F { +pub const fn min_as_f(column_type: ColumnType) -> F { match column_type { ColumnType::TinyInt => MontFp!("-128"), ColumnType::SmallInt => MontFp!("-32768"), From e4f78c56ddeff8578d0062f7c0c6d6de640023a3 Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:33:42 -0400 Subject: [PATCH 27/29] feat: the dynamic Dory GPU implementation should be able to copy column data --- .../dynamic_dory_commitment_helper_gpu.rs | 49 ++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs index a500da055..2d4e88567 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -1,5 +1,8 @@ use super::{DynamicDoryCommitment, G1Affine, ProverSetup}; -use crate::{base::commitment::CommittableColumn, proof_primitive::dory::pack_scalars::min_as_f}; +use crate::{ + base::commitment::CommittableColumn, + proof_primitive::dory::{offset_to_bytes::OffsetToBytes, pack_scalars::min_as_f}, +}; use ark_ec::CurveGroup; use ark_std::ops::Mul; @@ -45,6 +48,50 @@ fn signed_commits( .collect() } +/// Copies the column data to the scalar row slice. +/// +/// # Arguments +/// +/// * `column` - A reference to the committable column. +/// * `scalar_row_slice` - A mutable reference to the scalar row slice. +/// * `start` - The start index of the slice. +/// * `end` - The end index of the slice. +/// * `index` - The index of the column. 
+fn copy_column_data_to_slice( + column: &CommittableColumn, + scalar_row_slice: &mut [u8], + start: usize, + end: usize, + index: usize, +) { + match column { + CommittableColumn::Boolean(column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::TinyInt(column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::SmallInt(column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::Int(column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::BigInt(column) | CommittableColumn::TimestampTZ(_, _, column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::Int128(column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::Scalar(column) + | CommittableColumn::Decimal75(_, _, column) + | CommittableColumn::VarChar(column) => { + scalar_row_slice[start..end].copy_from_slice(&column[index].offset_to_bytes()); + } + CommittableColumn::RangeCheckWord(_) => todo!(), + } +} + pub(super) fn compute_dynamic_dory_commitments( _committable_columns: &[CommittableColumn], _offset: usize, From ccd1072d5b297686a7fe691a66b897f67be529d9 Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:38:05 -0400 Subject: [PATCH 28/29] feat: the dynamic_dory_commitment_helper_gpu module should be able to create metadata tables that are used to call Blitzar's vlen_msm function --- .../dynamic_dory_commitment_helper_gpu.rs | 247 +++++++++++++++++- .../dory/dynamic_dory_structure.rs | 3 - 2 files changed, 246 insertions(+), 4 deletions(-) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs index 
2d4e88567..b23adfc2e 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -1,10 +1,18 @@ -use super::{DynamicDoryCommitment, G1Affine, ProverSetup}; +use super::{ + dynamic_dory_structure::{full_width_of_row, index_from_row_and_column, matrix_size}, + DynamicDoryCommitment, G1Affine, ProverSetup, +}; use crate::{ base::commitment::CommittableColumn, proof_primitive::dory::{offset_to_bytes::OffsetToBytes, pack_scalars::min_as_f}, }; use ark_ec::CurveGroup; use ark_std::ops::Mul; +use core::iter; +use itertools::Itertools; +use tracing::{span, Level}; + +const BYTE_SIZE: u32 = 8; /// Modifies the sub commits by adding the minimum commitment of the column type to the signed sub commits. /// @@ -92,6 +100,142 @@ fn copy_column_data_to_slice( } } +/// Creates the metadata tables for Blitzar's `vlen_msm` algorithm. +/// +/// # Arguments +/// +/// * `committable_columns` - A reference to the committable columns. +/// * `offset` - The offset to the data. +/// +/// # Returns +/// +/// A tuple containing the output bit table, output length table, +/// and scalars required to call Blitzar's `vlen_msm` function. +#[tracing::instrument(name = "create_blitzar_metadata_tables", level = "debug", skip_all)] +fn create_blitzar_metadata_tables( + committable_columns: &[CommittableColumn], + offset: usize, +) -> (Vec, Vec, Vec) { + // Keep track of the lengths of the columns to handled signed data columns. + let ones_columns_lengths = committable_columns + .iter() + .map(CommittableColumn::len) + .collect_vec(); + + // The maximum matrix size will be used to create the scalars vector. + let (max_height, max_width) = if let Some(max_column_len) = + committable_columns.iter().map(CommittableColumn::len).max() + { + matrix_size(max_column_len, offset) + } else { + (0, 0) + }; + + // Find the single packed byte size of all committable columns. 
+ let num_of_bytes_in_committable_columns: usize = committable_columns + .iter() + .map(|column| column.column_type().byte_size()) + .sum(); + + // Get a single bit table entry with ones added for all committable columns that are signed. + let single_entry_in_blitzar_output_bit_table: Vec = committable_columns + .iter() + .map(|column| column.column_type().bit_size()) + .chain(iter::repeat(BYTE_SIZE).take(ones_columns_lengths.len())) + .collect(); + + // Create the full bit table vector to be used by Blitzar's vlen_msm algorithm. + let blitzar_output_bit_table: Vec = single_entry_in_blitzar_output_bit_table + .iter() + .copied() + .cycle() + .take(single_entry_in_blitzar_output_bit_table.len() * max_height) + .collect(); + + // Create the full length vector to be used by Blitzar's vlen_msm algorithm. + let blitzar_output_length_table: Vec = (0..blitzar_output_bit_table.len() + / single_entry_in_blitzar_output_bit_table.len()) + .flat_map(|i| { + itertools::repeat_n( + full_width_of_row(i) as u32, + single_entry_in_blitzar_output_bit_table.len(), + ) + }) + .collect(); + + // Create a cumulative length table to be used when packing the scalar vector. + let cumulative_byte_length_table: Vec = iter::once(0) + .chain(blitzar_output_bit_table.iter().scan(0usize, |acc, &x| { + *acc += (x / BYTE_SIZE) as usize; + Some(*acc) + })) + .collect(); + + // Create scalars array. Note, scalars need to be stored in a column-major order. + let num_scalar_rows = max_width; + let num_scalar_columns = + (num_of_bytes_in_committable_columns + ones_columns_lengths.len()) * max_height; + let mut blitzar_scalars = vec![0u8; num_scalar_rows * num_scalar_columns]; + + // Populate the scalars array. + let span = span!(Level::INFO, "pack_blitzar_scalars").entered(); + if !blitzar_scalars.is_empty() { + blitzar_scalars + .chunks_exact_mut(num_scalar_columns) + .enumerate() + .for_each(|(scalar_row, scalar_row_slice)| { + // Iterate over the columns and populate the scalars array. 
+ for scalar_col in 0..max_height { + // Find index in the committable columns. Note, the scalar is in + // column major order, that is why the (row, col) arguments are flipped. + if let Some(index) = index_from_row_and_column(scalar_col, scalar_row).and_then( + |committable_column_idx| committable_column_idx.checked_sub(offset), + ) { + for (i, committable_column) in committable_columns + .iter() + .enumerate() + .filter(|(_, committable_column)| index < committable_column.len()) + { + let start = cumulative_byte_length_table + [i + scalar_col * single_entry_in_blitzar_output_bit_table.len()]; + let end = start + + (single_entry_in_blitzar_output_bit_table[i] / BYTE_SIZE) + as usize; + + copy_column_data_to_slice( + committable_column, + scalar_row_slice, + start, + end, + index, + ); + } + + ones_columns_lengths + .iter() + .positions(|ones_columns_length| index < *ones_columns_length) + .for_each(|i| { + let ones_index = i + + scalar_col + * (num_of_bytes_in_committable_columns + + ones_columns_lengths.len()) + + num_of_bytes_in_committable_columns; + + scalar_row_slice[ones_index] = 1_u8; + }); + } + } + }); + } + span.exit(); + + ( + blitzar_output_bit_table, + blitzar_output_length_table, + blitzar_scalars, + ) +} + pub(super) fn compute_dynamic_dory_commitments( _committable_columns: &[CommittableColumn], _offset: usize, @@ -99,3 +243,104 @@ pub(super) fn compute_dynamic_dory_commitments( ) -> Vec { todo!() } + +#[cfg(test)] +mod tests { + use super::*; + use crate::base::math::decimal::Precision; + use proof_of_sql_parser::posql_time::{PoSQLTimeUnit, PoSQLTimeZone}; + + #[test] + fn we_can_populate_blitzar_metadata_tables_with_empty_columns() { + let committable_columns = [CommittableColumn::BigInt(&[0; 0])]; + let offset = 0; + let (bit_table, length_table, scalars) = + create_blitzar_metadata_tables(&committable_columns, offset); + + assert!(bit_table.is_empty()); + assert!(length_table.is_empty()); + assert!(scalars.is_empty()); + } + + #[test] + fn 
we_can_populate_blitzar_metadata_tables_with_empty_columns_and_an_offset() { + let committable_columns = [CommittableColumn::BigInt(&[0; 0])]; + let offset = 1; + let (bit_table, length_table, scalars) = + create_blitzar_metadata_tables(&committable_columns, offset); + + assert_eq!(bit_table, vec![64, 8]); + assert_eq!(length_table, vec![1, 1]); + assert_eq!(scalars, vec![0, 0, 0, 0, 0, 0, 0, 0, 0]); + } + + #[test] + fn we_can_populate_blitzar_metadata_tables_with_simple_column() { + let committable_columns = [CommittableColumn::BigInt(&[1])]; + let offset = 0; + let (bit_table, length_table, scalars) = + create_blitzar_metadata_tables(&committable_columns, offset); + + assert_eq!(bit_table, vec![64, 8]); + assert_eq!(length_table, vec![1, 1]); + assert_eq!(scalars, vec![1, 0, 0, 0, 0, 0, 0, 128, 1]); + } + + #[test] + fn we_can_populate_blitzar_metadata_tables_with_simple_column_and_offset() { + let committable_columns = [CommittableColumn::BigInt(&[1])]; + let offset = 1; + let (bit_table, length_table, scalars) = + create_blitzar_metadata_tables(&committable_columns, offset); + + assert_eq!(bit_table, vec![64, 8, 64, 8]); + assert_eq!(length_table, vec![1, 1, 2, 2]); + assert_eq!( + scalars, + vec![ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 128, 1 + ] + ); + } + + #[test] + fn we_can_populate_blitzar_metadata_tables_with_mixed_columns() { + let committable_columns = [ + CommittableColumn::TinyInt(&[1]), + CommittableColumn::SmallInt(&[2]), + CommittableColumn::Int(&[3]), + CommittableColumn::BigInt(&[4]), + CommittableColumn::Int128(&[5]), + CommittableColumn::Decimal75(Precision::new(1).unwrap(), 0, vec![[6, 0, 0, 0]]), + CommittableColumn::Scalar(vec![[7, 0, 0, 0]]), + CommittableColumn::VarChar(vec![[8, 0, 0, 0]]), + CommittableColumn::TimestampTZ(PoSQLTimeUnit::Second, PoSQLTimeZone::Utc, &[9]), + CommittableColumn::Boolean(&[true]), + ]; + + let offset = 0; + let (bit_table, length_table, 
scalars) = + create_blitzar_metadata_tables(&committable_columns, offset); + assert_eq!( + bit_table, + vec![8, 16, 32, 64, 128, 256, 256, 256, 64, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8] + ); + + assert_eq!( + length_table, + vec![1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + ); + assert_eq!( + scalars, + vec![ + 129, 2, 128, 3, 0, 0, 128, 4, 0, 0, 0, 0, 0, 0, 128, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 128, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 128, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 + ] + ); + } +} diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_structure.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_structure.rs index 2f4146060..2598db988 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_structure.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_structure.rs @@ -33,7 +33,6 @@ /// Returns the full width of a row in the matrix. /// /// Note: when row = 1, this correctly returns 2, even though no data belongs at position 0. -#[allow(dead_code)] pub(crate) const fn full_width_of_row(row: usize) -> usize { ((2 * row + 4) / 3).next_power_of_two() } @@ -56,7 +55,6 @@ pub(crate) const fn row_and_column_from_index(index: usize) -> (usize, usize) { } /// Returns the index of data where the (row, column) belongs. 
-#[allow(dead_code)] pub(crate) fn index_from_row_and_column(row: usize, column: usize) -> Option { let width_of_row = full_width_of_row(row); (column < width_of_row && (row, column) != (1, 0)) @@ -66,7 +64,6 @@ pub(crate) fn index_from_row_and_column(row: usize, column: usize) -> Option (usize, usize) { if data_len == 0 && offset == 0 { return (0, 0); From 78c26d4e9a1f38cb62c1d4189a95784f05cc8070 Mon Sep 17 00:00:00 2001 From: Jacob Trombetta Date: Wed, 9 Oct 2024 15:41:28 -0400 Subject: [PATCH 29/29] perf: the dynamic_dory_commitment_helper_gpu module should use Blitzar's vlen_msm function --- .../dynamic_dory_commitment_helper_gpu.rs | 73 +++++++++++++++++-- 1 file changed, 67 insertions(+), 6 deletions(-) diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs index b23adfc2e..b0aa4d0df 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dynamic_dory_commitment_helper_gpu.rs @@ -1,13 +1,14 @@ use super::{ dynamic_dory_structure::{full_width_of_row, index_from_row_and_column, matrix_size}, - DynamicDoryCommitment, G1Affine, ProverSetup, + pairings, DynamicDoryCommitment, G1Affine, ProverSetup, }; use crate::{ - base::commitment::CommittableColumn, + base::{commitment::CommittableColumn, slice_ops::slice_cast}, proof_primitive::dory::{offset_to_bytes::OffsetToBytes, pack_scalars::min_as_f}, }; use ark_ec::CurveGroup; use ark_std::ops::Mul; +use blitzar::compute::ElementP2; use core::iter; use itertools::Itertools; use tracing::{span, Level}; @@ -236,12 +237,72 @@ fn create_blitzar_metadata_tables( ) } +/// Computes the dynamic Dory commitment using the GPU implementation of the `vlen_msm` algorithm. +/// +/// # Arguments +/// +/// * `committable_columns` - A reference to the committable columns. +/// * `offset` - The offset to the data. 
+/// * `setup` - A reference to the prover setup. +/// +/// # Returns +/// +/// A vector containing the dynamic Dory commitments. +/// +/// # Panics +/// +/// Panics if the number of sub commits is not a multiple of the number of committable columns. +#[tracing::instrument( + name = "compute_dynamic_dory_commitments (gpu)", + level = "debug", + skip_all +)] pub(super) fn compute_dynamic_dory_commitments( - _committable_columns: &[CommittableColumn], - _offset: usize, - _setup: &ProverSetup, + committable_columns: &[CommittableColumn], + offset: usize, + setup: &ProverSetup, ) -> Vec { - todo!() + let Gamma_2 = setup.Gamma_2.last().unwrap(); + + // Get metadata tables for Blitzar's vlen_msm algorithm. + let (blitzar_output_bit_table, blitzar_output_length_table, blitzar_scalars) = + create_blitzar_metadata_tables(committable_columns, offset); + + // Initialize sub commits. + let mut blitzar_sub_commits = + vec![ElementP2::::default(); blitzar_output_bit_table.len()]; + + // Get sub commits from Blitzar's vlen_msm algorithm. + setup.blitzar_vlen_msm( + &mut blitzar_sub_commits, + &blitzar_output_bit_table, + &blitzar_output_length_table, + blitzar_scalars.as_slice(), + ); + + // Modify the sub commits to include the signed offset. + let all_sub_commits: Vec = slice_cast(&blitzar_sub_commits); + let signed_sub_commits = signed_commits(&all_sub_commits, committable_columns); + assert!( + signed_sub_commits.len() % committable_columns.len() == 0, + "Invalid number of sub commits" + ); + let num_commits = signed_sub_commits.len() / committable_columns.len(); + + // Calculate the dynamic Dory commitments. + let span = span!(Level::INFO, "multi_pairing").entered(); + let ddc: Vec = (0..committable_columns.len()) + .map(|i| { + let sub_slice = signed_sub_commits[i..] + .iter() + .step_by(committable_columns.len()) + .take(num_commits); + DynamicDoryCommitment(pairings::multi_pairing(sub_slice, &Gamma_2[..num_commits])) + }) + .collect(); + span.exit(); + + ddc } #[cfg(test)]