From f13a5761f043e62260d94e3b4ccb2811d17e50c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Wed, 25 Oct 2023 10:04:28 +0200 Subject: [PATCH 01/75] added hyrax PCS --- Cargo.toml | 24 +- README.md | 10 + src/README.md | 10 + src/error.rs | 65 +++ src/hyrax/data_structures.rs | 141 +++++++ src/hyrax/mod.rs | 568 +++++++++++++++++++++++++++ src/hyrax/tests.rs | 219 +++++++++++ src/hyrax/utils.rs | 38 ++ src/lib.rs | 13 + src/streaming_kzg/data_structures.rs | 3 +- src/streaming_kzg/mod.rs | 6 - src/streaming_kzg/space.rs | 3 +- src/utils.rs | 222 +++++++++++ 13 files changed, 1305 insertions(+), 17 deletions(-) create mode 100644 src/hyrax/data_structures.rs create mode 100644 src/hyrax/mod.rs create mode 100644 src/hyrax/tests.rs create mode 100644 src/hyrax/utils.rs create mode 100644 src/utils.rs diff --git a/Cargo.toml b/Cargo.toml index 4b58457e..8c30f9ba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,22 +15,24 @@ ark-serialize = { version = "^0.4.0", default-features = false, features = [ "de ark-ff = { version = "^0.4.0", default-features = false } ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } -ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge"] } +ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = ["sponge","merkle_tree" ] } ark-std = { version = "^0.4.0", default-features = false } +blake2 = { version = "0.10", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.13", default-features = false, optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } +merlin = { version = "3.0.0", default-features = false } [dev-dependencies] ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } -blake2 = { version = "0.10", default-features = false } +ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } rand_chacha = { version = "0.3.0", default-features = false } [profile.release] @@ -46,11 +48,14 @@ incremental = true debug = true [features] -default = [ "std", "parallel" ] -std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] -r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] -print-trace = [ "ark-std/print-trace" ] -parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon" ] +default = ["std", "parallel"] +std = ["ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] +r1cs = ["ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] +print-trace = ["ark-std/print-trace"] +parallel = ["std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon"] + +[target.'cfg(target_arch = "aarch64")'.dependencies] +num-traits = { version = "0.2", default-features = false, features = ["libm"] } [patch.crates-io] 
ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } @@ -60,4 +65,5 @@ ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitive ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } -ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } \ No newline at end of file +ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } diff --git a/README.md b/README.md index 7a4d582c..e86e2db1 100644 --- a/README.md +++ b/README.md @@ -181,6 +181,8 @@ Unless you explicitly state otherwise, any contribution that you submit to this [aurora-light]: https://ia.cr/2019/601 [pcd-acc]: https://ia.cr/2020/499 [pst]: https://ia.cr/2011/587 +[ligero]: https://ia.cr/2022/1608 +[hyrax]: https://eprint.iacr.org/2017/1132 ## Reference papers @@ -208,6 +210,14 @@ TCC 2020 Charalampos Papamanthou, Elaine Shi, Roberto Tamassia TCC 2013 +[Ligero: Lightweight Sublinear Arguments Without a Trusted Setup][ligero] +Scott Ames, Carmit Hazay, Yuval Ishai, Muthuramakrishnan Venkitasubramaniam +CCS 2017 + +[Doubly-efficient zkSNARKs without trusted setup][hyrax] +Riad S. Wahby, Ioanna Tzialla, abhi shelat, Justin Thaler, Michael Walfish +2018 IEEE Symposium on Security and Privacy + ## Acknowledgements This work was supported by: an Engineering and Physical Sciences Research Council grant; a Google Faculty Award; the RISELab at UC Berkeley; and donations from the Ethereum Foundation and the Interchain Foundation. diff --git a/src/README.md b/src/README.md index ec22e02a..e2e3d0fc 100644 --- a/src/README.md +++ b/src/README.md @@ -56,6 +56,16 @@ EUROCRYPT 2020 Aniket Kate, Gregory M. Zaverucha, Ian Goldberg ASIACRYPT 2010 +### Hyrax multilinear PC + +Polynomial commitment scheme introduced together with the Hyrax zkSNARK (in [this](https://eprint.iacr.org/2017/1132) article). It is based on Pedersen commitments and therefore relies on the difficulty of the discrete logarithm problem in order to provide a hiding PCS. + +[Doubly-efficient zkSNARKs without trusted setup][hyrax] +Riad S. Wahby, Ioanna Tzialla, abhi shelat, Justin Thaler, Michael Walfish +2018 IEEE Symposium on Security and Privacy + +[hyrax]: https://eprint.iacr.org/2017/1132 + ### Marlin variant of the Papamanthou-Shi-Tamassia multivariate PC Multivariate polynomial commitment based on the construction in the Papamanthou-Shi-Tamassia construction with batching and (optional) hiding property inspired by the univariate scheme in Marlin. diff --git a/src/error.rs b/src/error.rs index de7091eb..3712d2b2 100644 --- a/src/error.rs +++ b/src/error.rs @@ -93,6 +93,49 @@ pub enum Error { /// Index of the offending polynomial. label: String, }, + + /// This means a failure in verifying the commitment or the opening. + InvalidCommitment, + + /// This means during opening or verification, a commitment of incorrect + /// size (for example, with an insufficient number of entries) was + /// encountered + IncorrectCommitmentSize { + /// Encountered commitment size + encountered: usize, + /// Expected commitment size + expected: usize, + }, + + /// For PCS which rely on Fiat-Shamir to be rendered non-interactive, + /// these are errors that result from incorrect transcript manipulation. + TranscriptError, + + /// This means the required soundness error bound is inherently impossible. + /// E.g., the field is not big enough. 
+ InvalidParameters(String), + + /// Error resulting from hashing in linear code - based PCS. + HashingError, + + /// This means a commitment with a certain label was matched with a + /// a polynomial which has a different label - which shouldn't happen + MismatchedLabels { + /// The label of the commitment + commitment_label: String, + /// The label of the polynomial + polynomial_label: String, + }, + + /// This means multivariate polynomial with a certain number of variables + /// was matched (for instance, during commitment, opening or verification) + /// to a point with a different number of variables. + MismatchedNumVars { + /// The number of variables of the polynomial + poly_nv: usize, + /// The number of variables of the point + point_nv: usize, + }, } impl core::fmt::Display for Error { @@ -179,6 +222,28 @@ impl core::fmt::Display for Error { support up to degree ({:?})", label, poly_degree, supported_degree ), Error::IncorrectInputLength(err) => write!(f, "{}", err), + Error::InvalidCommitment => write!(f, "Failed to verify the commitment"), + Error::IncorrectCommitmentSize { + encountered, + expected, + } => write!( + f, + "the commitment has size {}, but size {} was expected", + encountered, expected + ), + Error::TranscriptError => write!(f, "Incorrect transcript manipulation"), + Error::InvalidParameters(err) => write!(f, "{}", err), + Error::HashingError => write!(f, "Error resulting from hashing"), + Error::MismatchedLabels { commitment_label, polynomial_label } => + write!(f, "Mismatched labels: commitment label: {}, polynomial label: {}", + commitment_label, + polynomial_label + ), + Error::MismatchedNumVars { poly_nv, point_nv } => + write!(f, "Mismatched number of variables: polynomial has {}, point has {}", + poly_nv, + point_nv, + ), } } } diff --git a/src/hyrax/data_structures.rs b/src/hyrax/data_structures.rs new file mode 100644 index 00000000..c1f91b42 --- /dev/null +++ b/src/hyrax/data_structures.rs @@ -0,0 +1,141 @@ +use ark_ec::AffineRepr; +use ark_ff::PrimeField; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::{rand::RngCore, vec::Vec}; + +use crate::{ + PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness, + PCUniversalParams, PCVerifierKey, +}; + +/// `UniversalParams` amounts to a Pederson commitment key of sufficient length +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct HyraxUniversalParams { + /// A list of generators of the group. + pub com_key: Vec, + /// A generator of the group. + pub h: G, +} + +impl PCUniversalParams for HyraxUniversalParams { + fn max_degree(&self) -> usize { + // Only MLEs are supported + 1 + } +} + +/// The committer key, which coincides with the universal parameters +pub type HyraxCommitterKey = HyraxUniversalParams; + +/// The verifier key, which coincides with the committer key +pub type HyraxVerifierKey = HyraxCommitterKey; + +impl PCCommitterKey for HyraxCommitterKey { + fn max_degree(&self) -> usize { + // Only MLEs are supported + 1 + } + fn supported_degree(&self) -> usize { + // Only MLEs are supported + 1 + } +} + +impl PCVerifierKey for HyraxVerifierKey { + // Only MLEs are supported + fn max_degree(&self) -> usize { + 1 + } + // Only MLEs are supported + fn supported_degree(&self) -> usize { + 1 + } +} + +/// Nothing to do to prepare this prover-verifier key. 
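// Added note, not part of the patch: for orientation on the sizes implied by
// `HyraxUniversalParams`, an n-variate MLE (n even) has 2^n evaluations
// arranged into a 2^(n/2) x 2^(n/2) square matrix, so `com_key` holds
// 2^(n/2) generators and `h` is the extra hiding generator. A minimal
// sketch; the helper name is hypothetical:
fn hyrax_key_size(num_vars: usize) -> usize {
    assert_eq!(num_vars % 2, 0, "this implementation needs an even number of variables");
    let dim = 1 << (num_vars / 2); // rows = columns of the square matrix
    dim + 1 // `dim` generators in `com_key`, plus the hiding generator `h`
}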
+pub type HyraxPreparedVerifierKey = HyraxVerifierKey; + +impl PCPreparedVerifierKey> for HyraxPreparedVerifierKey { + /// Simply clone the prover-verifier key + fn prepare(vk: &HyraxVerifierKey) -> Self { + vk.clone() + } +} + +/// Hyrax commitment to a polynomial consisting of one multi-commit per row of +/// the coefficient matrix +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct HyraxCommitment { + /// A list of multi-commits to each row of the matrix representing the + /// polynomial. + pub row_coms: Vec, +} + +impl PCCommitment for HyraxCommitment { + #[inline] + fn empty() -> Self { + HyraxCommitment { + row_coms: Vec::new(), + } + } + + // The degree bound is always 1, since only multilinear polynomials are + // supported + fn has_degree_bound(&self) -> bool { + true + } +} + +/// No preparation is needed for Hyrax commitments +pub type HyraxPreparedCommitment = HyraxCommitment; + +impl PCPreparedCommitment> for HyraxPreparedCommitment { + /// Simply clone the prover-verifier key + fn prepare(vk: &HyraxCommitment) -> Self { + vk.clone() + } +} + +pub(crate) type HyraxRandomness = Vec; + +/// A vector of scalars, each of which multiplies the distinguished group +/// element in the Pederson commitment key for a different commitment +impl PCRandomness for HyraxRandomness { + fn empty() -> Self { + unimplemented!() + } + + fn rand( + num_queries: usize, + _has_degree_bound: bool, + _num_vars: Option, + rng: &mut R, + ) -> Self { + (0..num_queries).map(|_| F::rand(rng)).collect() + } +} + +/// Proof of a Hyrax opening, containing various commitments +/// and auxiliary values generated randomly during the opening +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct HyraxProof { + /// Commitment to the evaluation of the polynomial at the requested point + pub com_eval: G, + /// Commitment to auxiliary random vector `d` + pub com_d: G, + /// Commitment to auxiliary random scalar `b` + pub com_b: G, + /// Auxiliary random vector + pub z: Vec, + /// Auxiliary random scalar + pub z_d: G::ScalarField, + /// Auxiliary random scalar + pub z_b: G::ScalarField, + /// The hiding scalar r_eval is not part of a Hyrax PCS proof as described + /// in the reference article. Cf. the "Modification note" at the beginning + /// of `mod.rs` + pub r_eval: G::ScalarField, +} diff --git a/src/hyrax/mod.rs b/src/hyrax/mod.rs new file mode 100644 index 00000000..37462274 --- /dev/null +++ b/src/hyrax/mod.rs @@ -0,0 +1,568 @@ +mod data_structures; +mod utils; +pub use data_structures::*; + +#[cfg(test)] +mod tests; + +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; +use ark_ff::PrimeField; +use ark_poly::MultilinearExtension; +use ark_std::{rand::RngCore, string::ToString, vec::Vec, UniformRand}; +use blake2::Blake2s256; +use core::marker::PhantomData; +use digest::Digest; + +#[cfg(feature = "parallel")] +use rayon::prelude::*; + +use crate::hyrax::utils::tensor_prime; +use crate::utils::{inner_product, scalar_by_vector, vector_sum, IOPTranscript, Matrix}; + +use crate::{ + challenge::ChallengeGenerator, hyrax::utils::flat_to_matrix_column_major, Error, + LabeledCommitment, LabeledPolynomial, PolynomialCommitment, +}; + +/// String of bytes used to seed the randomness during the setup function. 
+/// Note that the latter should never be used in production environments.
+pub const PROTOCOL_NAME: &'static [u8] = b"Hyrax protocol";
+
+/// Hyrax polynomial commitment scheme:
+/// A polynomial commitment scheme based on the hardness of the
+/// discrete logarithm problem in prime-order groups. This is a
+/// Fiat-Shamired version of the PCS described in the Hyrax paper
+/// [[WTsTW17]][hyrax].
+///
+/// [hyrax]: https://eprint.iacr.org/2017/1132.pdf
+///
+/// ### Modification note
+///
+/// In the PCS contained in the cited article, the verifier never learns the
+/// actual evaluation of the polynomial at the requested point, but is instead
+/// convinced that a previously received Pedersen commitment is indeed a
+/// commitment to said evaluation - this is what the SNARK proposed therein
+/// necessitates. However, the Arkworks framework requires the verifier to
+/// actually learn that value, which is why we have added the opening of
+/// the commitment at the end of the protocol. This likely does not result in
+/// an optimal non-hiding PCS, but we feel it is the most faithful adaptation
+/// of the original PCS that can be implemented with the current restrictions.
+///
+/// ### Future optimisations
+///
+/// - Due to the homomorphic nature of Pedersen commitments, it is likely some
+///   of the following methods can be designed more efficiently than their
+///   default implementations: `batch_open`, `batch_check`,
+///   `open_combinations`, `check_combinations`. This is not discussed in the
+///   reference article, but the IPA and KZG modules might be a good starting
+///   point.
+/// - On a related note to the previous point, there might be a more efficient
+///   way to open several polynomials at a single point than the currently
+///   implemented method, where only the computation of the vectors L and R is
+///   shared across polynomials.
+/// - The cited article proposes an optimisation in the section `Reducing the
+///   cost of proof-of-dot-prod`. It allows for non-square matrices (and hence
+///   removes the requirement for the number of variables to be even) and
+///   introduces a tradeoff between proof size and verifier time. It is
+///   probably worth pursuing.
+pub struct HyraxPC<
+    // The elliptic curve used for Pedersen commitments (only EC groups are
+    // supported as of now).
+    G: AffineRepr,
+    // A polynomial type representing multilinear polynomials
+    P: MultilinearExtension<G::ScalarField>,
+> {
+    _phantom: PhantomData<(G, P)>,
+}
+
+impl<G: AffineRepr, P: MultilinearExtension<G::ScalarField>> HyraxPC<G, P> {
+    /// Pedersen commitment to a vector of scalars as described in appendix A.1
+    /// of the reference article.
+    /// The caller must either directly pass the hiding exponent `r` inside
+    /// `Some`, or provide an rng so that `r` can be sampled.
+    /// If there are `n` scalars, the first `n` elements of the key will be
+    /// multiplied by them in the same order, and its `(n + 1)`-th element will
+    /// be multiplied by `r`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if both `r` and `rng` are `None`.
+ fn pedersen_commit( + key: &HyraxCommitterKey, + scalars: &[G::ScalarField], + r: Option, + rng: Option<&mut dyn RngCore>, + ) -> (G, G::ScalarField) { + // Cannot use unwrap_or, since its argument is always evaluated + let r = match r { + Some(v) => v, + None => G::ScalarField::rand(rng.expect("Either r or rng must be provided")), + }; + + let mut scalars_ext = Vec::from(scalars); + scalars_ext.push(r); + + // Trimming the key to the length of the coefficient vector + let mut points_ext = key.com_key[0..scalars.len()].to_vec(); + points_ext.push(key.h); + + let scalars_bigint = ark_std::cfg_iter!(scalars) + .map(|s| s.into_bigint()) + .collect::>(); + + // Multi-exponentiation in the group of points of the EC + let com = ::msm_bigint(&points_ext, &scalars_bigint); + + (com.into(), r) + } +} + +impl> + PolynomialCommitment< + G::ScalarField, + P, + // Dummy sponge - required by the trait, not used in this implementation + PoseidonSponge, + > for HyraxPC +{ + type UniversalParams = HyraxUniversalParams; + type CommitterKey = HyraxCommitterKey; + type VerifierKey = HyraxVerifierKey; + type PreparedVerifierKey = HyraxPreparedVerifierKey; + type Commitment = HyraxCommitment; + type PreparedCommitment = HyraxPreparedCommitment; + type Randomness = HyraxRandomness; + type Proof = Vec>; + type BatchProof = Vec; + type Error = Error; + + /// Outputs mock universal parameters for the Hyrax polynomial commitment + /// scheme. It does *not* return random keys across calls and should never + /// be used in settings where security is required - it is only useful for + /// testing. + /// + /// # Panics + /// + /// Panics if `num_vars` is None or contains an odd value. + fn setup( + _max_degree: usize, + num_vars: Option, + _rng: &mut R, + ) -> Result { + if num_vars.is_none() { + return Err(Error::InvalidNumberOfVariables); + } + + let n = num_vars.unwrap(); + + if n % 2 == 1 { + // Only polynomials with an even number of variables are + // supported in this implementation + return Err(Error::InvalidNumberOfVariables); + } + + // Number of rows (or, equivalently, colums) of a square matrix + // containing the coefficients of an n-variate ML polynomial + let dim = 1 << n / 2; + + // The following block of code is largely taking from the IPA module + // in this crate. It generates random points (not guaranteed to be + // generators, since the point at infinity should theoretically occur) + let points: Vec<_> = ark_std::cfg_into_iter!(0u64..dim + 1) + .map(|i| { + let mut hash = + Blake2s256::digest([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice()); + let mut p = G::from_random_bytes(&hash); + let mut j = 0u64; + while p.is_none() { + let mut bytes = PROTOCOL_NAME.to_vec(); + bytes.extend(i.to_le_bytes()); + bytes.extend(j.to_le_bytes()); + hash = Blake2s256::digest(bytes.as_slice()); + p = G::from_random_bytes(&hash); + j += 1; + } + let point = p.unwrap(); + point.mul_by_cofactor_to_group() + }) + .collect(); + + // Converting from projective to affine representation + let mut points = G::Group::normalize_batch(&points); + + let h: G = points.pop().unwrap(); + + Ok(HyraxUniversalParams { com_key: points, h }) + } + + /// Trims a key into a prover key and a verifier key. This should only + /// amount to discarding some of the points in said key if the prover + /// and verifier only wish to commit to polynomials with fewer variables + /// than the key can support. Since the number of variables is not + /// considered in the prototype, this function currently simply clones the + /// key. 
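// Added illustration, not part of the patch: an unoptimised reference for
// the quantity `pedersen_commit` computes; the MSM call above is an
// optimised version of this sum. The function name is hypothetical.
//     com = s_1 * G_1 + ... + s_n * G_n + r * H
use ark_ec::{AffineRepr, CurveGroup};
use ark_ff::PrimeField;

fn pedersen_commit_naive<G: AffineRepr>(
    gens: &[G],                 // G_1, ..., G_n (a prefix of `com_key`)
    h: &G,                      // the hiding generator H
    scalars: &[G::ScalarField], // s_1, ..., s_n
    r: G::ScalarField,          // the hiding scalar
) -> G {
    assert_eq!(gens.len(), scalars.len());
    let mut acc = h.mul_bigint(r.into_bigint());
    for (g, s) in gens.iter().zip(scalars) {
        acc += g.mul_bigint(s.into_bigint());
    }
    acc.into_affine()
}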
+ fn trim( + pp: &Self::UniversalParams, + _supported_degree: usize, + _supported_hiding_bound: usize, + _enforced_degree_bounds: Option<&[usize]>, + ) -> Result<(Self::CommitterKey, Self::VerifierKey), Self::Error> { + Ok((pp.clone(), pp.clone())) + } + + /// Produces a list of commitments to the passed polynomials. Cf. the + /// section "Square-root commitment scheme" from the reference article. + /// + /// # Panics + /// + /// Panics if `rng` is None, since Hyrax requires randomness in order to + /// commit to a polynomial + fn commit<'a>( + ck: &Self::CommitterKey, + polynomials: impl IntoIterator>, + rng: Option<&mut dyn RngCore>, + ) -> Result< + ( + Vec>, + Vec, + ), + Self::Error, + > + where + P: 'a, + { + let mut coms = Vec::new(); + let mut rands = Vec::new(); + + let rng_inner = rng.expect("Committing to polynomials requires a random generator"); + + for l_poly in polynomials { + let mut com_rands = Vec::new(); + + let label = l_poly.label(); + let poly = l_poly.polynomial(); + + let n = poly.num_vars(); + let dim = 1 << n / 2; + + if n % 2 == 1 { + // Only polynomials with an even number of variables are + // supported in this implementation + return Err(Error::InvalidNumberOfVariables); + } + + if n > ck.com_key.len() { + return Err(Error::InvalidNumberOfVariables); + } + + let m = flat_to_matrix_column_major(&poly.to_evaluations(), dim, dim); + + // Commiting to the matrix with one multi-commitment per row + let row_coms = m + .iter() + .map(|row| { + let (c, r) = Self::pedersen_commit(ck, row, None, Some(rng_inner)); + // Storing the randomness used in the commitment + com_rands.push(r); + c + }) + .collect(); + + let com = HyraxCommitment { row_coms }; + let l_comm = LabeledCommitment::new(label.to_string(), com, Some(1)); + + coms.push(l_comm); + rands.push(com_rands); + } + + Ok((coms, rands)) + } + + /// Opens a list of polynomial commitments at a desired point. This + /// requires the list of original polynomials (`labeled_polynomials`) as + /// well as the random values using by the Pedersen multi-commits during + /// the commitment phase (`randomness`). Cf. sections "Square-root + /// commitment scheme" and appendix A.2 from the reference article. + /// + /// # Panics + /// + /// Panics if + /// - `rng` is None, since Hyrax requires randomness in order to + /// open the commitment to a polynomial. + /// - The point doesn't have an even number of variables. + /// - The labels of a commitment doesn't match that of the corresponding + /// polynomial. + /// - The number of variables of a polynomial doesn't match that of the + /// point. 
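// Added illustration, not part of the patch: the identity that `open` and
// `check` rely on. With T the dim x dim matrix of evaluations
// (dim = 2^(n/2)) and l, r the tensor vectors derived from the two halves of
// the reversed point, the evaluation equals l^T * T * r. This sketch assumes
// `flat_to_matrix_column_major` and `tensor_prime` (from `hyrax/utils.rs`)
// and `inner_product` (from `utils.rs`) are in scope:
fn eval_via_matrix<F: ark_ff::Field>(evals: &[F], point_rev: &[F]) -> F {
    let n = point_rev.len();
    let dim = 1 << (n / 2);
    let t = flat_to_matrix_column_major(evals, dim, dim);
    let l = tensor_prime(&point_rev[n / 2..]); // from the lower half
    let r = tensor_prime(&point_rev[..n / 2]); // from the upper half
    // l^T * T, a row vector of length dim...
    let lt: Vec<F> = (0..dim)
        .map(|j| (0..dim).map(|i| l[i] * t[i][j]).sum())
        .collect();
    // ...dotted with r, matching `eval` as computed in `open`.
    inner_product(&lt, &r)
}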
+ /// + /// # Disregarded arguments + /// - `opening_challenges` + fn open<'a>( + ck: &Self::CommitterKey, + labeled_polynomials: impl IntoIterator>, + commitments: impl IntoIterator>, + point: &'a P::Point, + // Not used and not generic on the cryptographic sponge S + _opening_challenges: &mut ChallengeGenerator< + G::ScalarField, + PoseidonSponge, + >, + rands: impl IntoIterator, + rng: Option<&mut dyn RngCore>, + ) -> Result + where + Self::Commitment: 'a, + Self::Randomness: 'a, + P: 'a, + { + let n = point.len(); + + if n % 2 == 1 { + // Only polynomials with an even number of variables are + // supported in this implementation + return Err(Error::InvalidNumberOfVariables); + } + + let dim = 1 << n / 2; + + // Reversing the point is necessary because the MLE interface returns + // evaluations in little-endian order + let point_rev: Vec = point.iter().rev().cloned().collect(); + + let point_lower = &point_rev[n / 2..]; + let point_upper = &point_rev[..n / 2]; + + // Deriving the tensors which result in the evaluation of the polynomial + // when they are multiplied by the coefficient matrix. + let l = tensor_prime(point_lower); + let r = tensor_prime(point_upper); + + let mut proofs = Vec::new(); + + let rng_inner = rng.expect("Opening polynomials requires randomness"); + + for (l_poly, (l_com, randomness)) in labeled_polynomials + .into_iter() + .zip(commitments.into_iter().zip(rands.into_iter())) + { + let label = l_poly.label(); + if label != l_com.label() { + return Err(Error::MismatchedLabels { + commitment_label: l_com.label().to_string(), + polynomial_label: label.to_string(), + }); + } + + let poly = l_poly.polynomial(); + let com = l_com.commitment(); + + if poly.num_vars() != n { + return Err(Error::MismatchedNumVars { + poly_nv: poly.num_vars(), + point_nv: n, + }); + } + + // Initialising the transcript + let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); + + // Absorbing public parameters + transcript.append_serializable_element(b"public parameters", ck)?; + + // Absorbing the commitment to the polynomial + transcript.append_serializable_element(b"commitment", &com.row_coms)?; + + // Absorbing the point + transcript.append_serializable_element(b"point", point)?; + + // Commiting to the matrix formed by the polynomial coefficients + let t_aux = flat_to_matrix_column_major(&poly.to_evaluations(), dim, dim); + let t = Matrix::new_from_rows(t_aux); + + let lt = t.row_mul(&l); + + // t_prime coincides witht he Pedersen commitment to lt with the + // randomnes r_lt computed here + let r_lt = cfg_iter!(l) + .zip(cfg_iter!(randomness)) + .map(|(l, r)| *l * r) + .sum::(); + + let eval = inner_product(<, &r); + + // Singleton commit + let (com_eval, r_eval) = Self::pedersen_commit(ck, &[eval], None, Some(rng_inner)); + + // ******** Dot product argument ******** + // Appendix A.2 in the reference article + + let d: Vec = + (0..dim).map(|_| G::ScalarField::rand(rng_inner)).collect(); + + let b = inner_product(&r, &d); + + // Multi-commit + let (com_d, r_d) = Self::pedersen_commit(ck, &d, None, Some(rng_inner)); + + // Singleton commit + let (com_b, r_b) = Self::pedersen_commit(ck, &[b], None, Some(rng_inner)); + + // Absorbing the commitment to the evaluation + transcript.append_serializable_element(b"com_eval", &com_eval)?; + + // Absorbing the two auxiliary commitments + transcript.append_serializable_element(b"com_d", &com_d)?; + transcript.append_serializable_element(b"com_b", &com_b)?; + + // Receive the random challenge c from the verifier, i.e. 
squeeze + // it from the transcript. + let c = transcript.get_and_append_challenge(b"c").unwrap(); + + let z = vector_sum(&d, &scalar_by_vector(c, <)); + let z_d = c * r_lt + r_d; + let z_b = c * r_eval + r_b; + + // ******** Opening ******** + // This is *not* part of the Hyrax PCS as described in the reference + // article. Cf. the "Modification note" at the beginning of this file. + // From the prover's perspective, opening amounts to adding r_eval to + // the proof. + + proofs.push(HyraxProof { + com_eval, + com_d, + com_b, + z, + z_d, + z_b, + r_eval, + }); + } + + Ok(proofs) + } + + /// Verifies a list of opening proofs and confirms the evaluation of the + /// committed polynomials at the desired point. + /// + /// # Panics + /// - If the point doesn't have an even number of variables. + /// - If the length of a commitment does not correspond to the length of the + /// point (specifically, commitment length should be 2^(point-length/2)). + /// + /// # Disregarded arguments + /// - `opening_challenges` + /// - `rng` + fn check<'a>( + vk: &Self::VerifierKey, + commitments: impl IntoIterator>, + point: &'a P::Point, + values: impl IntoIterator, + proof: &Self::Proof, + // Not used and not generic on the cryptographic sponge S + _opening_challenges: &mut ChallengeGenerator< + G::ScalarField, + PoseidonSponge, + >, + _rng: Option<&mut dyn RngCore>, + ) -> Result + where + Self::Commitment: 'a, + { + let n = point.len(); + + if n % 2 == 1 { + // Only polynomials with an even number of variables are + // supported in this implementation + return Err(Error::InvalidNumberOfVariables); + } + + // Reversing the point is necessary because the MLE interface returns + // evaluations in little-endian order + let point_rev: Vec = point.iter().rev().cloned().collect(); + + let point_lower = &point_rev[n / 2..]; + let point_upper = &point_rev[..n / 2]; + + // Deriving the tensors which result in the evaluation of the polynomial + // when they are multiplied by the coefficient matrix. + let l = tensor_prime(point_lower); + let r = tensor_prime(point_upper); + + for (com, (claim, h_proof)) in commitments + .into_iter() + .zip(values.into_iter().zip(proof.iter())) + { + let row_coms = &com.commitment().row_coms; + + // extract each field from h_proof + let HyraxProof { + com_eval, + com_d, + com_b, + z, + z_d, + z_b, + r_eval, + } = h_proof; + + if row_coms.len() != 1 << n / 2 { + return Err(Error::IncorrectCommitmentSize { + encountered: row_coms.len(), + expected: 1 << n / 2, + }); + } + + // Computing t_prime with a multi-exponentiation + let l_bigint = cfg_iter!(l) + .map(|chi| chi.into_bigint()) + .collect::>(); + let t_prime: G = ::msm_bigint(row_coms, &l_bigint).into(); + + // Construct transcript and squeeze the challenge c from it + + let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); + + // Absorbing public parameters + transcript.append_serializable_element(b"public parameters", vk)?; + + // Absorbing the commitment to the polynomial + transcript.append_serializable_element(b"commitment", row_coms)?; + + // Absorbing the point + transcript.append_serializable_element(b"point", point)?; + + // Absorbing the commitment to the evaluation + transcript.append_serializable_element(b"com_eval", com_eval)?; + + // Absorbing the two auxiliary commitments + transcript.append_serializable_element(b"com_d", com_d)?; + transcript.append_serializable_element(b"com_b", com_b)?; + + // Receive the random challenge c from the verifier, i.e. squeeze + // it from the transcript. 
+ let c = transcript.get_and_append_challenge(b"c").unwrap(); + + // First check + let com_z_zd = Self::pedersen_commit(vk, z, Some(*z_d), None).0; + if com_z_zd != (t_prime.mul(c) + com_d).into() { + return Ok(false); + } + + // Second check + let com_dp = Self::pedersen_commit(vk, &[inner_product(&r, z)], Some(*z_b), None).0; + if com_dp != (com_eval.mul(c) + com_b).into() { + return Ok(false); + } + + // Third check: opening + let exp = Self::pedersen_commit(vk, &[claim], Some(*r_eval), None).0; + + if *com_eval != exp { + return Ok(false); + } + } + + Ok(true) + } +} diff --git a/src/hyrax/tests.rs b/src/hyrax/tests.rs new file mode 100644 index 00000000..f471b49f --- /dev/null +++ b/src/hyrax/tests.rs @@ -0,0 +1,219 @@ +use ark_bls12_377::G1Affine; +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +use ark_ec::AffineRepr; +use ark_ed_on_bls12_381::EdwardsAffine; +use ark_ff::PrimeField; +use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; +use ark_std::test_rng; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + +use crate::challenge::ChallengeGenerator; +use crate::hyrax::HyraxPC; + +use crate::utils::test_sponge; +use crate::{LabeledPolynomial, PolynomialCommitment}; + +use crate::tests::*; + +// The test structure is largely taken from the multilinear_ligero module +// inside this crate + +// ****************** types ****************** + +type Fr = ::ScalarField; +type Hyrax381 = HyraxPC>; + +type Fq = ::ScalarField; +type Hyrax377 = HyraxPC>; + +// ******** auxiliary test functions ******** + +fn rand_poly( + _: usize, // degree: unused + num_vars: Option, + rng: &mut ChaCha20Rng, +) -> DenseMultilinearExtension { + match num_vars { + Some(n) => DenseMultilinearExtension::rand(n, rng), + None => panic!("Must specify the number of variables"), + } +} + +fn constant_poly( + _: usize, // degree: unused + num_vars: Option, + rng: &mut ChaCha20Rng, +) -> DenseMultilinearExtension { + match num_vars { + Some(0) => DenseMultilinearExtension::rand(0, rng), + _ => panic!("Must specify the number of variables: 0"), + } +} + +fn rand_point(num_vars: Option, rng: &mut ChaCha20Rng) -> Vec { + match num_vars { + Some(n) => (0..n).map(|_| F::rand(rng)).collect(), + None => panic!("Must specify the number of variables"), + } +} + +// ****************** tests ****************** + +#[test] +fn test_hyrax_construction() { + // Desired number of variables (must be even!) 
+ let n = 8; + + let chacha = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + + let pp = Hyrax381::setup(1, Some(n), chacha).unwrap(); + + let (ck, vk) = Hyrax381::trim(&pp, 1, 1, None).unwrap(); + + let l_poly = LabeledPolynomial::new( + "test_poly".to_string(), + rand_poly::(0, Some(n), chacha), + None, + None, + ); + + let (c, rands) = Hyrax381::commit(&ck, &[l_poly.clone()], Some(chacha)).unwrap(); + + let point: Vec = rand_point(Some(n), chacha); + let value = l_poly.evaluate(&point); + + // Dummy argument + let mut test_sponge = test_sponge::(); + let mut challenge_generator: ChallengeGenerator> = + ChallengeGenerator::new_univariate(&mut test_sponge); + + let proof = Hyrax381::open( + &ck, + &[l_poly], + &c, + &point, + &mut (challenge_generator.clone()), + &rands, + Some(chacha), + ) + .unwrap(); + + assert!(Hyrax381::check( + &vk, + &c, + &point, + [value], + &proof, + &mut challenge_generator, + Some(chacha), + ) + .unwrap()); +} + +#[test] +fn hyrax_single_poly_test() { + single_poly_test::<_, _, Hyrax381, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + single_poly_test::<_, _, Hyrax377, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); +} + +#[test] +fn hyrax_constant_poly_test() { + single_poly_test::<_, _, Hyrax377, _>( + Some(0), + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, Hyrax381, _>( + Some(0), + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); +} + +#[test] +fn hyrax_full_end_to_end_test() { + full_end_to_end_test::<_, _, Hyrax377, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + full_end_to_end_test::<_, _, Hyrax381, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); +} + +#[test] +fn hyrax_single_equation_test() { + single_equation_test::<_, _, Hyrax377, _>( + Some(6), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_equation_test::<_, _, Hyrax381, _>( + Some(6), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); +} + +#[test] +fn hyrax_two_equation_test() { + two_equation_test::<_, _, Hyrax377, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + two_equation_test::<_, _, Hyrax381, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); +} + +#[test] +fn hyrax_full_end_to_end_equation_test() { + full_end_to_end_equation_test::<_, _, Hyrax377, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + full_end_to_end_equation_test::<_, _, Hyrax381, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); +} diff --git a/src/hyrax/utils.rs b/src/hyrax/utils.rs new file mode 100644 index 00000000..69642e44 --- /dev/null +++ b/src/hyrax/utils.rs @@ -0,0 +1,38 @@ +use ark_ff::Field; +use ark_std::vec::Vec; + +#[cfg(feature = "parallel")] +use rayon::prelude::*; + +/// Transforms a flat vector into a matrix in column-major order. The latter is +/// given as a list of rows. 
+/// +/// For example, if flat = [1, 2, 3, 4, 5, 6] and n = 2, m = 3, then +/// the output is [[1, 3, 5], [2, 4, 6]]. +pub(crate) fn flat_to_matrix_column_major(flat: &[T], n: usize, m: usize) -> Vec> { + assert_eq!(flat.len(), n * m, "n * m should coincide with flat.len()"); + let mut res = Vec::new(); + + for row in 0..n { + res.push((0..m).map(|col| flat[col * n + row]).collect()) + } + res +} + +// This function computes all evaluations of the MLE EQ(i, values) for i +// between 0...0 and 1...1 (n-bit strings). This results in essentially +// the same as the tensor_vec function in the `linear_codes/utils.rs`, +// the difference being the endianness of the order of the output. +pub(crate) fn tensor_prime(values: &[F]) -> Vec { + if values.is_empty() { + return vec![F::one()]; + } + + let tail = tensor_prime(&values[1..]); + let val = values[0]; + + cfg_iter!(tail) + .map(|v| *v * (F::one() - val)) + .chain(cfg_iter!(tail).map(|v| *v * val)) + .collect() +} diff --git a/src/lib.rs b/src/lib.rs index fe417e94..05e53e4a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -34,6 +34,9 @@ use ark_std::{ pub mod data_structures; pub use data_structures::*; +/// Useful functions +pub(crate) mod utils; + /// R1CS constraints for polynomial constraints. #[cfg(feature = "r1cs")] mod constraints; @@ -126,6 +129,16 @@ pub use marlin::marlin_pst13_pc; /// [bdfg]: https://eprint.iacr.org/2020/081.pdf pub mod streaming_kzg; +/// A polynomial commitment scheme based on the hardness of the +/// discrete logarithm problem in prime-order groups. This is a +/// Fiat-Shamired version of the PCS described in the Hyrax paper +/// [[WTsTW17]][hyrax], with the difference that, unlike in the +/// cited reference, the evaluation of the polynomial at the point +/// of interest is indeed revealed to the verifier at the end. +/// +/// [hyrax]: https://eprint.iacr.org/2017/1132.pdf +pub mod hyrax; + /// `QuerySet` is the set of queries that are to be made to a set of labeled polynomials/equations /// `p` that have previously been committed to. Each element of a `QuerySet` is a pair of /// `(label, (point_label, point))`, where `label` is the label of a polynomial in `p`, diff --git a/src/streaming_kzg/data_structures.rs b/src/streaming_kzg/data_structures.rs index 5923a40e..7adaf005 100644 --- a/src/streaming_kzg/data_structures.rs +++ b/src/streaming_kzg/data_structures.rs @@ -2,9 +2,10 @@ use ark_ff::Field; use ark_std::borrow::Borrow; use ark_std::vec::Vec; -use crate::streaming_kzg::ceil_div; use ark_std::iterable::Iterable; +use crate::utils::ceil_div; + /// A `Streamer` folding a vector of coefficients /// with the given challenges, and producing a stream of items /// `(i, v)` where `i` indicates the depth, and `v` is the next coefficient. diff --git a/src/streaming_kzg/mod.rs b/src/streaming_kzg/mod.rs index e3bdb2af..8fd494e2 100644 --- a/src/streaming_kzg/mod.rs +++ b/src/streaming_kzg/mod.rs @@ -284,12 +284,6 @@ pub(crate) fn vanishing_polynomial(points: &[F]) -> DensePolynomial .fold(one, |x, y| x.naive_mul(&y)) } -/// Return ceil(x / y). -pub(crate) fn ceil_div(x: usize, y: usize) -> usize { - // XXX. warning: this expression can overflow. - (x + y - 1) / y -} - /// Compute a linear combination of the polynomials `polynomials` with the given challenges. 
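// Added illustration, not part of the patch: the index conventions of the two
// helpers at the top of `hyrax/utils.rs` are easy to get wrong, so a
// test-style check of both (module name and field choice are illustrative;
// assumes placement inside `hyrax/utils.rs`):
#[cfg(test)]
mod convention_checks {
    use super::*;
    use ark_bls12_381::Fr; // any prime field would do
    use ark_ff::One;

    #[test]
    fn tensor_prime_ordering() {
        // For values = [x0, x1], the output is indexed big-endian in the
        // input order: [(1-x0)(1-x1), (1-x0)x1, x0(1-x1), x0*x1].
        let (x0, x1) = (Fr::from(2u64), Fr::from(3u64));
        let t = tensor_prime(&[x0, x1]);
        let one = Fr::one();
        assert_eq!(t[0], (one - x0) * (one - x1));
        assert_eq!(t[1], (one - x0) * x1);
        assert_eq!(t[2], x0 * (one - x1));
        assert_eq!(t[3], x0 * x1);
    }

    #[test]
    fn column_major_unflattening() {
        // Matches the doc comment: 6 entries, n = 2 rows, m = 3 columns.
        let flat = [1, 2, 3, 4, 5, 6];
        let m = flat_to_matrix_column_major(&flat, 2, 3);
        assert_eq!(m, vec![vec![1, 3, 5], vec![2, 4, 6]]);
    }
}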
pub(crate) fn linear_combination( polynomials: &[PP], diff --git a/src/streaming_kzg/space.rs b/src/streaming_kzg/space.rs index ab50adfd..cc1d36d2 100644 --- a/src/streaming_kzg/space.rs +++ b/src/streaming_kzg/space.rs @@ -6,7 +6,8 @@ use ark_std::borrow::Borrow; use ark_std::collections::VecDeque; use ark_std::vec::Vec; -use crate::streaming_kzg::{ceil_div, vanishing_polynomial, FoldedPolynomialTree}; +use crate::streaming_kzg::{vanishing_polynomial, FoldedPolynomialTree}; +use crate::utils::ceil_div; use ark_ec::scalar_mul::variable_base::{ChunkedPippenger, HashMapPippenger, VariableBaseMSM}; use ark_std::iterable::{Iterable, Reverse}; diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 00000000..437f993e --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,222 @@ +use core::marker::PhantomData; + +#[cfg(not(feature = "std"))] +use num_traits::Float; + +#[cfg(feature = "parallel")] +use rayon::{ + iter::{IntoParallelRefIterator, ParallelIterator}, + prelude::IndexedParallelIterator, +}; + +use ark_ff::{Field, PrimeField}; +use ark_serialize::CanonicalSerialize; +use ark_std::vec::Vec; +use merlin::Transcript; + +use crate::Error; + +/// Takes as input a struct, and converts them to a series of bytes. All traits +/// that implement `CanonicalSerialize` can be automatically converted to bytes +/// in this manner. +/// From jellyfish lib +#[macro_export] +macro_rules! to_bytes { + ($x:expr) => {{ + let mut buf = ark_std::vec![]; + ark_serialize::CanonicalSerialize::serialize_compressed($x, &mut buf).map(|_| buf) + }}; +} + +/// Return ceil(x / y). +pub(crate) fn ceil_div(x: usize, y: usize) -> usize { + // XXX. warning: this expression can overflow. + (x + y - 1) / y +} + +#[derive(Debug)] +pub(crate) struct Matrix { + pub(crate) n: usize, + pub(crate) m: usize, + entries: Vec>, +} + +impl Matrix { + /// Returns a Matrix given a list of its rows, each in turn represented as a list of field elements. + /// + /// # Panics + /// Panics if the sub-lists do not all have the same length. + pub(crate) fn new_from_rows(row_list: Vec>) -> Self { + let m = row_list[0].len(); + + for row in row_list.iter().skip(1) { + assert_eq!( + row.len(), + m, + "Invalid matrix construction: not all rows have the same length" + ); + } + + Self { + n: row_list.len(), + m, + entries: row_list, + } + } + + /// Returns the entry in position (i, j). **Indexing starts at 0 in both coordinates**, + /// i.e. the first element is in position (0, 0) and the last one in (n - 1, j - 1), + /// where n and m are the number of rows and columns, respectively. + /// + /// Index bound checks are waived for efficiency and behaviour under invalid indexing is undefined + #[cfg(test)] + pub(crate) fn entry(&self, i: usize, j: usize) -> F { + self.entries[i][j] + } + + /// Returns the product v * self, where v is interpreted as a row vector. In other words, + /// it returns a linear combination of the rows of self with coefficients given by v. + /// + /// Panics if the length of v is different from the number of rows of self. 
+ pub(crate) fn row_mul(&self, v: &[F]) -> Vec { + assert_eq!( + v.len(), + self.n, + "Invalid row multiplication: vector has {} elements whereas each matrix column has {}", + v.len(), + self.n + ); + + (0..self.m) + .map(|col| { + inner_product( + v, + &(0..self.n) + .map(|row| self.entries[row][col]) + .collect::>(), + ) + }) + .collect() + } +} + +#[inline] +pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { + ark_std::cfg_iter!(v1) + .zip(v2) + .map(|(li, ri)| *li * ri) + .sum() +} + +#[inline] +pub(crate) fn scalar_by_vector(s: F, v: &[F]) -> Vec { + ark_std::cfg_iter!(v).map(|x| *x * s).collect() +} + +#[inline] +pub(crate) fn vector_sum(v1: &[F], v2: &[F]) -> Vec { + ark_std::cfg_iter!(v1) + .zip(v2) + .map(|(li, ri)| *li + ri) + .collect() +} + +/// The following struct is taken from jellyfish repository. Once they change +/// their dependency on `crypto-primitive`, we use their crate instead of +/// a copy-paste. We needed the newer `crypto-primitive` for serializing. +#[derive(Clone)] +pub(crate) struct IOPTranscript { + transcript: Transcript, + is_empty: bool, + #[doc(hidden)] + phantom: PhantomData, +} + +// TODO: merge this with jf_plonk::transcript +impl IOPTranscript { + /// Create a new IOP transcript. + pub(crate) fn new(label: &'static [u8]) -> Self { + Self { + transcript: Transcript::new(label), + is_empty: true, + phantom: PhantomData, + } + } + + /// Append the message to the transcript. + pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { + self.transcript.append_message(label, msg); + self.is_empty = false; + Ok(()) + } + + /// Append the message to the transcript. + pub(crate) fn append_serializable_element( + &mut self, + label: &'static [u8], + group_elem: &S, + ) -> Result<(), Error> { + self.append_message( + label, + &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, + ) + } + + /// Generate the challenge from the current transcript + /// and append it to the transcript. + /// + /// The output field element is statistical uniform as long + /// as the field has a size less than 2^384. + pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { + // we need to reject when transcript is empty + if self.is_empty { + return Err(Error::TranscriptError); + } + + let mut buf = [0u8; 64]; + self.transcript.challenge_bytes(label, &mut buf); + let challenge = F::from_le_bytes_mod_order(&buf); + self.append_serializable_element(label, &challenge)?; + Ok(challenge) + } +} + +#[inline] +#[cfg(test)] +pub(crate) fn to_field(v: Vec) -> Vec { + v.iter().map(|x| F::from(*x)).collect::>() +} + +// TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. 
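// Added illustration, not part of the patch: the absorb-then-squeeze
// Fiat-Shamir pattern `IOPTranscript` supports, as used by Hyrax's
// `open`/`check`. Labels and the standalone function are illustrative only.
fn fiat_shamir_example<F: ark_ff::PrimeField>() -> Result<F, crate::Error> {
    let mut transcript = IOPTranscript::<F>::new(b"example transcript");
    // Absorb public data first: squeezing from an empty transcript errors out.
    transcript.append_message(b"public bytes", b"some public data")?;
    // Any `CanonicalSerialize` value can likewise be absorbed with
    // `append_serializable_element(label, &value)`.
    transcript.get_and_append_challenge(b"challenge")
}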
+#[cfg(test)] +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; + +#[cfg(test)] +pub(crate) fn test_sponge() -> PoseidonSponge { + use ark_crypto_primitives::sponge::{poseidon::PoseidonConfig, CryptographicSponge}; + use ark_std::test_rng; + + let full_rounds = 8; + let partial_rounds = 31; + let alpha = 17; + + let mds = vec![ + vec![F::one(), F::zero(), F::one()], + vec![F::one(), F::one(), F::zero()], + vec![F::zero(), F::one(), F::one()], + ]; + + let mut v = Vec::new(); + let mut ark_rng = test_rng(); + + for _ in 0..(full_rounds + partial_rounds) { + let mut res = Vec::new(); + + for _ in 0..3 { + res.push(F::rand(&mut ark_rng)); + } + v.push(res); + } + let config = PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, v, 2, 1); + PoseidonSponge::new(&config) +} From fba3d8017f0e1f3249cf08439309bffa53fafcc3 Mon Sep 17 00:00:00 2001 From: mmagician Date: Wed, 25 Oct 2023 12:09:15 +0200 Subject: [PATCH 02/75] Add univariate and multilinear Ligero PCS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Hossein Moghaddas Co-authored-by: Antonio Mejías Gil --- Cargo.toml | 26 +- README.md | 5 + src/error.rs | 18 + src/lib.rs | 8 + src/linear_codes/data_structures.rs | 126 ++++ src/linear_codes/ligero.rs | 143 +++++ src/linear_codes/mod.rs | 616 +++++++++++++++++++ src/linear_codes/multilinear_ligero/mod.rs | 90 +++ src/linear_codes/multilinear_ligero/tests.rs | 294 +++++++++ src/linear_codes/univariate_ligero/mod.rs | 90 +++ src/linear_codes/univariate_ligero/tests.rs | 403 ++++++++++++ src/linear_codes/utils.rs | 246 ++++++++ src/streaming_kzg/data_structures.rs | 3 +- src/streaming_kzg/mod.rs | 6 - src/streaming_kzg/space.rs | 3 +- src/utils.rs | 343 +++++++++++ 16 files changed, 2403 insertions(+), 17 deletions(-) create mode 100644 src/linear_codes/data_structures.rs create mode 100644 src/linear_codes/ligero.rs create mode 100644 src/linear_codes/mod.rs create mode 100644 src/linear_codes/multilinear_ligero/mod.rs create mode 100644 src/linear_codes/multilinear_ligero/tests.rs create mode 100644 src/linear_codes/univariate_ligero/mod.rs create mode 100644 src/linear_codes/univariate_ligero/tests.rs create mode 100644 src/linear_codes/utils.rs create mode 100644 src/utils.rs diff --git a/Cargo.toml b/Cargo.toml index 4b58457e..a2c2a0db 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,22 +15,24 @@ ark-serialize = { version = "^0.4.0", default-features = false, features = [ "de ark-ff = { version = "^0.4.0", default-features = false } ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } -ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge"] } +ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = ["sponge","merkle_tree" ] } ark-std = { version = "^0.4.0", default-features = false } +blake2 = { version = "0.10", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.13", default-features = false, optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } +merlin = { version = "3.0.0", default-features = false } [dev-dependencies] ark-ed-on-bls12-381 = { version = "^0.4.0", 
default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } -blake2 = { version = "0.10", default-features = false } +ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } rand_chacha = { version = "0.3.0", default-features = false } [profile.release] @@ -46,18 +48,24 @@ incremental = true debug = true [features] -default = [ "std", "parallel" ] -std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] -r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] -print-trace = [ "ark-std/print-trace" ] -parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon" ] +default = ["std", "parallel"] +std = ["ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] +r1cs = ["ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] +print-trace = ["ark-std/print-trace"] +parallel = ["std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon"] + +[target.'cfg(target_arch = "aarch64")'.dependencies] +num-traits = { version = "0.2", default-features = false, features = ["libm"] } [patch.crates-io] ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } +ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } + ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } -ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } \ No newline at end of file +ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } diff --git a/README.md b/README.md index 7a4d582c..e9100359 100644 --- a/README.md +++ b/README.md @@ -181,6 +181,7 @@ Unless you explicitly state otherwise, any contribution that you submit to this [aurora-light]: https://ia.cr/2019/601 [pcd-acc]: https://ia.cr/2020/499 [pst]: https://ia.cr/2011/587 +[ligero]: https://ia.cr/2022/1608 ## Reference papers @@ -208,6 +209,10 @@ TCC 2020 Charalampos Papamanthou, Elaine Shi, Roberto Tamassia TCC 2013 +[Ligero: Lightweight Sublinear Arguments Without a Trusted Setup][ligero] +Scott Ames, Carmit Hazay, Yuval Ishai, Muthuramakrishnan Venkitasubramaniam +CCS 2017 + ## Acknowledgements This work was supported by: an Engineering and Physical Sciences Research Council grant; a Google Faculty Award; the RISELab at UC Berkeley; and donations from the Ethereum Foundation and the Interchain Foundation. diff --git a/src/error.rs b/src/error.rs index de7091eb..d7e83a52 100644 --- a/src/error.rs +++ b/src/error.rs @@ -93,6 +93,20 @@ pub enum Error { /// Index of the offending polynomial. label: String, }, + + /// This means a failure in verifying the commitment or the opening. + InvalidCommitment, + + /// For PCS which rely on Fiat-Shamir to be rendered non-interactive, + /// these are errors that result from incorrect transcript manipulation. + TranscriptError, + + /// This means the required soundness error bound is inherently impossible. 
+ /// E.g., the field is not big enough. + InvalidParameters(String), + + /// Error resulting from hashing in linear code - based PCS. + HashingError, } impl core::fmt::Display for Error { @@ -179,6 +193,10 @@ impl core::fmt::Display for Error { support up to degree ({:?})", label, poly_degree, supported_degree ), Error::IncorrectInputLength(err) => write!(f, "{}", err), + Error::InvalidCommitment => write!(f, "Failed to verify the commitment"), + Error::TranscriptError => write!(f, "Incorrect transcript manipulation"), + Error::InvalidParameters(err) => write!(f, "{}", err), + Error::HashingError => write!(f, "Error resulting from hashing") } } } diff --git a/src/lib.rs b/src/lib.rs index fe417e94..4ea46875 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -34,6 +34,9 @@ use ark_std::{ pub mod data_structures; pub use data_structures::*; +/// Useful functions +pub(crate) mod utils; + /// R1CS constraints for polynomial constraints. #[cfg(feature = "r1cs")] mod constraints; @@ -126,6 +129,11 @@ pub use marlin::marlin_pst13_pc; /// [bdfg]: https://eprint.iacr.org/2020/081.pdf pub mod streaming_kzg; +/// Scheme based on the Ligero construction in [[Ligero]][ligero]. +/// +/// [ligero]: https://eprint.iacr.org/2022/1608 +pub mod linear_codes; + /// `QuerySet` is the set of queries that are to be made to a set of labeled polynomials/equations /// `p` that have previously been committed to. Each element of a `QuerySet` is a pair of /// `(label, (point_label, point))`, where `label` is the label of a polynomial in `p`, diff --git a/src/linear_codes/data_structures.rs b/src/linear_codes/data_structures.rs new file mode 100644 index 00000000..3da4a47b --- /dev/null +++ b/src/linear_codes/data_structures.rs @@ -0,0 +1,126 @@ +use crate::{ + PCCommitment, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness, PCVerifierKey, +}; +use ark_crypto_primitives::{ + crh::CRHScheme, + merkle_tree::{Config, LeafParam, Path, TwoToOneParam}, +}; +use ark_ff::PrimeField; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::marker::PhantomData; +use ark_std::rand::RngCore; +use ark_std::vec::Vec; + +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Clone(bound = ""), Debug(bound = ""))] +/// The public parameters for Ligero PCS. +pub struct LigeroPCParams { + pub(crate) _field: PhantomData, + /// The security parameter + pub(crate) sec_param: usize, + /// The inverse of the code rate. + pub(crate) rho_inv: usize, + /// This is a flag which determines if the random linear combination is done. + pub(crate) check_well_formedness: bool, + /// Parameters for hash function of Merkle tree leaves + #[derivative(Debug = "ignore")] + pub(crate) leaf_hash_params: LeafParam, + /// Parameters for hash function of Merke tree combining two nodes into one + #[derivative(Debug = "ignore")] + pub(crate) two_to_one_params: TwoToOneParam, + // Parameters for obtaining leaf digest from leaf value. 
+ #[derivative(Debug = "ignore")] + pub(crate) col_hash_params: H::Parameters, +} + +pub(crate) type LinCodePCPreparedVerifierKey = (); + +impl PCPreparedVerifierKey for LinCodePCPreparedVerifierKey { + fn prepare(_vk: &Unprepared) -> Self {} +} +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub(crate) struct Metadata { + pub(crate) n_rows: usize, + pub(crate) n_cols: usize, + pub(crate) n_ext_cols: usize, +} + +/// The commitment to a polynomial is a root of the merkle tree, +/// where each node is a hash of the column of the encoded coefficient matrix U. +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCCommitment { + // number of rows resp. columns of the square matrix containing the coefficients of the polynomial + pub(crate) metadata: Metadata, + pub(crate) root: C::InnerDigest, +} + +impl PCCommitment for LinCodePCCommitment { + fn empty() -> Self { + LinCodePCCommitment::default() + } + + fn has_degree_bound(&self) -> bool { + false + } +} + +pub(crate) type LinCodePCPreparedCommitment = LinCodePCCommitment; + +impl PCPreparedCommitment + for LinCodePCPreparedCommitment +{ + fn prepare(_cm: &Unprepared) -> Self { + LinCodePCPreparedCommitment::default() + } +} + +pub(crate) type LinCodePCRandomness = (); + +impl PCRandomness for LinCodePCRandomness { + fn empty() -> Self { + unimplemented!() + } + + fn rand( + _num_queries: usize, + _has_degree_bound: bool, + _num_vars: Option, + _rng: &mut R, + ) -> Self { + unimplemented!() + } +} + +/// Proof of an individual linear code well-formedness check or opening +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub(crate) struct LinCodePCProofSingle +where + F: PrimeField, + C: Config, +{ + /// For each of the indices in q, `paths` contains the path from the root of the merkle tree to the leaf + pub(crate) paths: Vec>, + + /// v, s.t. 
E(v) = w + pub(crate) v: Vec, + + pub(crate) columns: Vec>, +} + +/// The Proof type for linear code PCS, which amounts to an array of individual proofs +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCProof +where + F: PrimeField, + C: Config, +{ + pub(crate) opening: LinCodePCProofSingle, + pub(crate) well_formedness: Option>, +} + +// Multiple poly at one point +pub(crate) type LPCPArray = Vec>; diff --git a/src/linear_codes/ligero.rs b/src/linear_codes/ligero.rs new file mode 100644 index 00000000..5102c146 --- /dev/null +++ b/src/linear_codes/ligero.rs @@ -0,0 +1,143 @@ +use super::LigeroPCParams; +use super::LinCodeParametersInfo; +use crate::utils::ceil_div; +use crate::{PCCommitterKey, PCUniversalParams, PCVerifierKey}; + +use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; +use ark_crypto_primitives::merkle_tree::{Config, LeafParam, TwoToOneParam}; +use ark_ff::PrimeField; +use ark_poly::EvaluationDomain; +use ark_poly::GeneralEvaluationDomain; +use ark_std::marker::PhantomData; +#[cfg(not(feature = "std"))] +use num_traits::Float; + +impl LigeroPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + /// Create new UniversalParams + pub fn new( + sec_param: usize, + rho_inv: usize, + check_well_formedness: bool, + leaf_hash_params: LeafParam, + two_to_one_params: TwoToOneParam, + col_hash_params: H::Parameters, + ) -> Self { + Self { + _field: PhantomData, + sec_param, + rho_inv, + check_well_formedness, + leaf_hash_params, + two_to_one_params, + col_hash_params, + } + } +} + +impl PCUniversalParams for LigeroPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn max_degree(&self) -> usize { + if F::TWO_ADICITY < self.rho_inv as u32 { + 0 + } else if (F::TWO_ADICITY - self.rho_inv as u32) * 2 < 64 { + 2_usize.pow((F::TWO_ADICITY - self.rho_inv as u32) * 2) + } else { + usize::MAX + } + } +} + +impl PCCommitterKey for LigeroPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn max_degree(&self) -> usize { + if (F::TWO_ADICITY - self.rho_inv as u32) * 2 < 64 { + 2_usize.pow((F::TWO_ADICITY - self.rho_inv as u32) * 2) + } else { + usize::MAX + } + } + + fn supported_degree(&self) -> usize { + as PCCommitterKey>::max_degree(self) + } +} + +impl PCVerifierKey for LigeroPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn max_degree(&self) -> usize { + if (F::TWO_ADICITY - self.rho_inv as u32) * 2 < 64 { + 2_usize.pow((F::TWO_ADICITY - self.rho_inv as u32) * 2) + } else { + usize::MAX + } + } + + fn supported_degree(&self) -> usize { + as PCVerifierKey>::max_degree(self) + } +} + +impl LinCodeParametersInfo for LigeroPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn check_well_formedness(&self) -> bool { + self.check_well_formedness + } + + fn distance(&self) -> (usize, usize) { + (self.rho_inv - 1, self.rho_inv) + } + + fn sec_param(&self) -> usize { + self.sec_param + } + + /// Compute the a suitable (for instance, FFT-friendly over F) matrix with at least n entries. + /// The return pair (n, m) corresponds to the dimensions n x m. 
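+    ///
+    /// For example, over an FFT-friendly field, n = 10 gives aux = ceil(sqrt(10)) = 4,
+    /// so n_cols = 4 (the size of the smallest FFT domain with at least 4 elements),
+    /// and the result is (3, 4): a 3 x 4 matrix indeed holds at least 10 entries.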
+ fn compute_dimensions(&self, n: usize) -> (usize, usize) { + assert_eq!( + (n as f64) as usize, + n, + "n cannot be converted to f64: aborting" + ); + + let aux = (n as f64).sqrt().ceil() as usize; + let n_cols = GeneralEvaluationDomain::::new(aux) + .expect("Field F does not admit FFT with m elements") + .size(); + + (ceil_div(n, n_cols), n_cols) + } + + fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters { + &self.leaf_hash_params + } + + fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { + &self.two_to_one_params + } + + fn col_hash_params(&self) -> &::Parameters { + &self.col_hash_params + } +} diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs new file mode 100644 index 00000000..a6b40fbc --- /dev/null +++ b/src/linear_codes/mod.rs @@ -0,0 +1,616 @@ +use crate::utils::{inner_product, IOPTranscript, Matrix}; +use crate::{ + Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey, + PolynomialCommitment, +}; + +use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; +use ark_crypto_primitives::merkle_tree::MerkleTree; +use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_ff::PrimeField; +use ark_poly::Polynomial; +use ark_std::borrow::Borrow; +use ark_std::marker::PhantomData; +use ark_std::rand::RngCore; +use ark_std::string::ToString; +use ark_std::vec::Vec; + +mod utils; + +mod multilinear_ligero; +mod univariate_ligero; + +pub use multilinear_ligero::MultilinearLigero; +pub use univariate_ligero::UnivariateLigero; + +mod data_structures; +mod ligero; +use data_structures::*; + +pub use data_structures::{LigeroPCParams, LinCodePCProof}; +#[cfg(any(feature = "benches", test))] +pub use utils::{FieldToBytesColHasher, LeafIdentityHasher}; + +use utils::{calculate_t, get_indices_from_transcript, hash_column}; + +const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; + +/// For linear code PC schemes, the universal paramters, committer key +/// and verifier key are all the same. This trait abstracts the common +/// information contained in these. +pub trait LinCodeParametersInfo +where + C: Config, + H: CRHScheme, +{ + /// Get the security parameter. + fn sec_param(&self) -> usize; + + /// Get the distance of the code. + fn distance(&self) -> (usize, usize); + + /// See whether there should be a well-formedness check. + fn check_well_formedness(&self) -> bool; + + /// Compute the dimensions of the coefficient matrix. + fn compute_dimensions(&self, n: usize) -> (usize, usize); + + /// Get the hash parameters for obtaining leaf digest from leaf value. + fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters; + + /// Get the parameters for hashing nodes in the merkle tree. + fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; + + /// Get the parameters for hashing a vector of values, + /// representing a column of the coefficient matrix, into a leaf value. + fn col_hash_params(&self) -> &H::Parameters; +} + +/// A trait for linear codes. +pub trait LinearEncode +where + F: PrimeField, + C: Config, + H: CRHScheme, + P: Polynomial, +{ + /// For schemes like Brakedown and Ligero, PCCommiiterKey and + /// PCVerifierKey and PCUniversalParams are all the same. + type LinCodePCParams: PCUniversalParams + + PCCommitterKey + + PCVerifierKey + + LinCodeParametersInfo; + + /// Does a default setup for the PCS. 
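+    /// The returned `LinCodePCParams` plays the role of universal parameters,
+    /// committer key and verifier key all at once.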
+ fn setup( + max_degree: usize, + num_vars: Option, + rng: &mut R, + leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, + two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + col_hash_params: H::Parameters, + ) -> Self::LinCodePCParams; + + /// Encode a message, which is interpreted as a vector of coefficients + /// of a polynomial of degree m - 1. + fn encode(msg: &[F], param: &Self::LinCodePCParams) -> Vec; + + /// Represent the polynomial as either coefficients, + /// in the univariate case, or evaluations over + /// the Boolean hypercube, in the multilinear case. + fn poly_to_vec(polynomial: &P) -> Vec; + + /// Represent the query point as a vector of Field elements. + fn point_to_vec(point: P::Point) -> Vec; + + /// Arrange the coefficients of the polynomial into a matrix, + /// and apply encoding to each row. + /// Returns the tuple (original_matrix, encoded_matrix). + fn compute_matrices(polynomial: &P, param: &Self::LinCodePCParams) -> (Matrix, Matrix) { + let mut coeffs = Self::poly_to_vec(polynomial); + + // 1. Computing the matrix dimensions. + let (n_rows, n_cols) = param.compute_dimensions(coeffs.len()); + + // padding the coefficient vector with zeroes + coeffs.resize(n_rows * n_cols, F::zero()); + + let mat = Matrix::new_from_flat(n_rows, n_cols, &coeffs); + + // 2. Apply encoding row-wise + let ext_mat = + Matrix::new_from_rows(mat.rows().iter().map(|r| Self::encode(r, param)).collect()); + + (mat, ext_mat) + } + + /// Tensor the query point z in the following sense: + /// For a polynomial p(X) represented by a matrix M + /// with n rows and m columns such that M_{i,j} = p_{i + n*j}, + /// we define the tensoring of `z`: (a, b) = tensor(z, n, m) such that: + /// p(z) = b^T.M.a + /// returns the evaluation of p at z. + fn tensor(z: &P::Point, n: usize, m: usize) -> (Vec, Vec); +} + +/// Any linear-code-based commitment scheme. +pub struct LinearCodePCS +where + F: PrimeField, + C: Config, + S: CryptographicSponge, + P: Polynomial, + H: CRHScheme, + L: LinearEncode, +{ + _phantom: PhantomData<(L, F, P, S, C, H)>, +} + +impl PolynomialCommitment for LinearCodePCS +where + L: LinearEncode, + F: PrimeField, + P: Polynomial, + S: CryptographicSponge, + C: Config + 'static, + Vec: Borrow<::Input>, + H::Output: Into, + C::Leaf: Sized + Clone + Default, + H: CRHScheme, +{ + type UniversalParams = L::LinCodePCParams; + + type CommitterKey = L::LinCodePCParams; + + type VerifierKey = L::LinCodePCParams; + + type PreparedVerifierKey = LinCodePCPreparedVerifierKey; + + type Commitment = LinCodePCCommitment; + + type PreparedCommitment = LinCodePCPreparedCommitment; + + type Randomness = LinCodePCRandomness; + + type Proof = LPCPArray; + + type BatchProof = Vec; + + type Error = Error; + + /// This is only a default setup with reasonable parameters. + /// To create your own public parameters (from which vk/ck can be derived by `trim`), + /// see the documentation for `LigeroPCUniversalParams`. 
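+    ///
+    /// A minimal usage sketch, assuming a fully instantiated PCS type alias
+    /// `LigeroPCS` (as in the tests; the Merkle-tree and column-hash parameters
+    /// are sampled internally from `rng`):
+    /// ```ignore
+    /// let mut rng = ark_std::test_rng();
+    /// let pp = LigeroPCS::setup(max_degree, None, &mut rng)?;
+    /// let (ck, vk) = LigeroPCS::trim(&pp, 0, 0, None)?;
+    /// ```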
+ fn setup( + max_degree: usize, + num_vars: Option, + rng: &mut R, + ) -> Result { + let leaf_hash_params = ::setup(rng).unwrap(); + let two_to_one_params = ::setup(rng) + .unwrap() + .clone(); + let col_hash_params = ::setup(rng).unwrap(); + let pp = L::setup::( + max_degree, + num_vars, + rng, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ); + let real_max_degree = ::max_degree(&pp); + if max_degree > real_max_degree || real_max_degree == 0 { + return Err(Error::InvalidParameters(FIELD_SIZE_ERROR.to_string())); + } + Ok(pp) + } + + fn trim( + pp: &Self::UniversalParams, + _supported_degree: usize, + _supported_hiding_bound: usize, + _enforced_degree_bounds: Option<&[usize]>, + ) -> Result<(Self::CommitterKey, Self::VerifierKey), Self::Error> { + if ::max_degree(pp) == 0 { + return Err(Error::InvalidParameters(FIELD_SIZE_ERROR.to_string())); + } + Ok((pp.clone(), pp.clone())) + } + + fn commit<'a>( + ck: &Self::CommitterKey, + polynomials: impl IntoIterator>, + _rng: Option<&mut dyn RngCore>, + ) -> Result< + ( + Vec>, + Vec, + ), + Self::Error, + > + where + P: 'a, + { + let mut commitments = Vec::new(); + + for labeled_polynomial in polynomials.into_iter() { + let polynomial = labeled_polynomial.polynomial(); + + // 1. Arrange the coefficients of the polynomial into a matrix, + // and apply encoding to get `ext_mat`. + let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + + // 2. Create the Merkle tree from the hashes of each column. + let col_tree = create_merkle_tree::( + &ext_mat, + ck.leaf_hash_params(), + ck.two_to_one_params(), + ck.col_hash_params(), + )?; + + // 3. Obtain the MT root and add it to the transcript. + let root = col_tree.root(); + + let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); + + transcript + .append_serializable_element(b"root", &root) + .map_err(|_| Error::TranscriptError)?; + + let n_rows = mat.n; + let n_cols = mat.m; + let n_ext_cols = ext_mat.m; + + // 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata. + let commitment = LinCodePCCommitment { + metadata: Metadata { + n_rows, + n_cols, + n_ext_cols, + }, + root, + }; + + commitments.push(LabeledCommitment::new( + labeled_polynomial.label().clone(), + commitment, + None, + )); + } + let com_len = &commitments.len(); + Ok((commitments, vec![(); *com_len])) + } + + fn open<'a>( + ck: &Self::CommitterKey, + labeled_polynomials: impl IntoIterator>, + commitments: impl IntoIterator>, + point: &'a P::Point, + _challenge_generator: &mut crate::challenge::ChallengeGenerator, + _rands: impl IntoIterator, + _rng: Option<&mut dyn RngCore>, + ) -> Result + where + P: 'a, + Self::Randomness: 'a, + Self::Commitment: 'a, + { + let mut proof_array = LPCPArray::default(); + let labeled_commitments: Vec<&'a LabeledCommitment> = + commitments.into_iter().collect(); + let labeled_polynomials: Vec<&'a LabeledPolynomial> = + labeled_polynomials.into_iter().collect(); + + if labeled_commitments.len() != labeled_polynomials.len() { + return Err(Error::IncorrectInputLength(format!( + "Mismatched lengths: {} commitments, {} polynomials", + labeled_commitments.len(), + labeled_polynomials.len() + ))); + } + + for i in 0..labeled_polynomials.len() { + let polynomial = labeled_polynomials[i].polynomial(); + let commitment = labeled_commitments[i].commitment(); + let n_rows = commitment.metadata.n_rows; + let n_cols = commitment.metadata.n_cols; + let root = &commitment.root; + + // 1. 
Arrange the coefficients of the polynomial into a matrix, + // and apply encoding to get `ext_mat`. + let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + + // 2. Create the Merkle tree from the hashes of each column. + let col_tree = create_merkle_tree::( + &ext_mat, + ck.leaf_hash_params(), + ck.two_to_one_params(), + ck.col_hash_params(), + )?; + + // 3. Generate vector `b` to left-multiply the matrix. + let (_, b) = L::tensor(point, n_cols, n_rows); + + let mut transcript = IOPTranscript::new(b"transcript"); + transcript + .append_serializable_element(b"root", root) + .map_err(|_| Error::TranscriptError)?; + + // If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and append it to the transcript. + let well_formedness = if ck.check_well_formedness() { + let mut r = Vec::new(); + for _ in 0..n_rows { + r.push( + transcript + .get_and_append_challenge(b"r") + .map_err(|_| Error::TranscriptError)?, + ); + } + let v = mat.row_mul(&r); + + transcript + .append_serializable_element(b"v", &v) + .map_err(|_| Error::TranscriptError)?; + Some(v) + } else { + None + }; + + let point_vec = L::point_to_vec(point.clone()); + for element in point_vec.iter() { + transcript + .append_serializable_element(b"point", element) + .map_err(|_| Error::TranscriptError)?; + } + + proof_array.push(LinCodePCProof { + // Compute the opening proof and append b.M to the transcript. + opening: generate_proof( + ck.sec_param(), + ck.distance(), + &b, + &mat, + &ext_mat, + &col_tree, + &mut transcript, + )?, + well_formedness, + }); + } + + Ok(proof_array) + } + + fn check<'a>( + vk: &Self::VerifierKey, + commitments: impl IntoIterator>, + point: &'a P::Point, + values: impl IntoIterator, + proof_array: &Self::Proof, + _challenge_generator: &mut crate::challenge::ChallengeGenerator, + _rng: Option<&mut dyn RngCore>, + ) -> Result + where + Self::Commitment: 'a, + { + let labeled_commitments: Vec<&'a LabeledCommitment> = + commitments.into_iter().collect(); + let values: Vec = values.into_iter().collect(); + + if labeled_commitments.len() != proof_array.len() + || labeled_commitments.len() != values.len() + { + return Err(Error::IncorrectInputLength( + format!( + "Mismatched lengths: {} proofs were provided for {} commitments with {} claimed values",labeled_commitments.len(), proof_array.len(), values.len() + ) + )); + } + let leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters = + vk.leaf_hash_params(); + let two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = + vk.two_to_one_params(); + + for (i, labeled_commitment) in labeled_commitments.iter().enumerate() { + let commitment = labeled_commitment.commitment(); + let n_rows = commitment.metadata.n_rows; + let n_cols = commitment.metadata.n_cols; + let n_ext_cols = commitment.metadata.n_ext_cols; + let root = &commitment.root; + let t = calculate_t::(vk.sec_param(), vk.distance(), n_ext_cols)?; + + let mut transcript = IOPTranscript::new(b"transcript"); + transcript + .append_serializable_element(b"root", &commitment.root) + .map_err(|_| Error::TranscriptError)?; + + let out = if vk.check_well_formedness() { + if proof_array[i].well_formedness.is_none() { + return Err(Error::InvalidCommitment); + } + let tmp = &proof_array[i].well_formedness.as_ref(); + let well_formedness = tmp.unwrap(); + let mut r = Vec::with_capacity(n_rows); + for _ in 0..n_rows { + r.push( + transcript + .get_and_append_challenge(b"r") + .map_err(|_| Error::TranscriptError)?, + ); + } + // Upon sending `v` to the 
Verifier, add it to the sponge. The claim is that v = r.M. + transcript + .append_serializable_element(b"v", well_formedness) + .map_err(|_| Error::TranscriptError)?; + + (Some(well_formedness), Some(r)) + } else { + (None, None) + }; + + // 1. Seed the transcript with the point and the recieved vector + // TODO Consider removing the evaluation point from the transcript. + let point_vec = L::point_to_vec(point.clone()); + for element in point_vec.iter() { + transcript + .append_serializable_element(b"point", element) + .map_err(|_| Error::TranscriptError)?; + } + transcript + .append_serializable_element(b"v", &proof_array[i].opening.v) + .map_err(|_| Error::TranscriptError)?; + + // 2. Ask random oracle for the `t` indices where the checks happen. + let indices = get_indices_from_transcript::(n_ext_cols, t, &mut transcript)?; + + // 3. Hash the received columns into leaf hashes. + let col_hashes: Vec = proof_array[i] + .opening + .columns + .iter() + .map(|c| hash_column::(c.clone(), vk.col_hash_params()).unwrap()) + .collect(); + + // 4. Verify the paths for each of the leaf hashes - this is only run once, + // even if we have a well-formedness check (i.e., we save sending and checking the columns). + // See "Concrete optimizations to the commitment scheme", p.12 of [Brakedown](https://eprint.iacr.org/2021/1043.pdf). + for (j, (leaf, q_j)) in col_hashes.iter().zip(indices.iter()).enumerate() { + let path = &proof_array[i].opening.paths[j]; + if path.leaf_index != *q_j { + return Err(Error::InvalidCommitment); + } + + path.verify(leaf_hash_params, two_to_one_params, root, leaf.clone()) + .map_err(|_| Error::InvalidCommitment)?; + } + + // Helper closure: checks if a.b = c. + let check_inner_product = |a, b, c| -> Result<(), Error> { + if inner_product(a, b) != c { + return Err(Error::InvalidCommitment); + } + + Ok(()) + }; + + // 5. Compute the encoding w = E(v). + let w = L::encode(&proof_array[i].opening.v, vk); + + // 6. Compute `a`, `b` to right- and left- multiply with the matrix `M`. + let (a, b) = L::tensor(point, n_cols, n_rows); + + // 7. Probabilistic checks that whatever the prover sent, + // matches with what the verifier computed for himself. + // Note: we sacrifice some code repetition in order not to repeat execution. 
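+            // Concretely, for each sampled column index j, we check
+            // <r, c_j> = E(r.M)_j (well-formedness) and <b, c_j> = E(b.M)_j (opening),
+            // where c_j is the received column and r.M, b.M are the vectors the prover sent.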
+ if let (Some(well_formedness), Some(r)) = out { + let w_well_formedness = L::encode(well_formedness, vk); + for (transcript_index, matrix_index) in indices.iter().enumerate() { + check_inner_product( + &r, + &proof_array[i].opening.columns[transcript_index], + w_well_formedness[*matrix_index], + )?; + check_inner_product( + &b, + &proof_array[i].opening.columns[transcript_index], + w[*matrix_index], + )?; + } + } else { + for (transcript_index, matrix_index) in indices.iter().enumerate() { + check_inner_product( + &b, + &proof_array[i].opening.columns[transcript_index], + w[*matrix_index], + )?; + } + } + + if inner_product(&proof_array[i].opening.v, &a) != values[i] { + eprintln!("Function check: claimed value in position {i} does not match the evaluation of the committed polynomial in the same position"); + return Ok(false); + } + } + + Ok(true) + } +} + +// TODO maybe this can go to utils +fn create_merkle_tree( + ext_mat: &Matrix, + leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters, + two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + col_hash_params: &H::Parameters, +) -> Result, Error> +where + F: PrimeField, + C: Config, + H: CRHScheme, + Vec: Borrow<::Input>, + H::Output: Into, + C::Leaf: Default + Clone, +{ + let mut col_hashes: Vec = Vec::new(); + let ext_mat_cols = ext_mat.cols(); + + for col in ext_mat_cols.into_iter() { + let col_digest = hash_column::(col, col_hash_params)?; + col_hashes.push(col_digest); + } + + // pad the column hashes with zeroes + let next_pow_of_two = col_hashes.len().next_power_of_two(); + col_hashes.resize(next_pow_of_two, ::default()); + + MerkleTree::::new(leaf_hash_params, two_to_one_params, col_hashes) + .map_err(|_| Error::HashingError) +} + +fn generate_proof( + sec_param: usize, + distance: (usize, usize), + b: &[F], + mat: &Matrix, + ext_mat: &Matrix, + col_tree: &MerkleTree, + transcript: &mut IOPTranscript, +) -> Result, Error> +where + F: PrimeField, + C: Config, +{ + let t = calculate_t::(sec_param, distance, ext_mat.m)?; + + // 1. left-multiply the matrix by `b`. + let v = mat.row_mul(b); + + transcript + .append_serializable_element(b"v", &v) + .map_err(|_| Error::TranscriptError)?; + + // 2. Generate t column indices to test the linear combination on. + let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?; + + // 3. Compute Merkle tree paths for the requested columns. + let mut queried_columns = Vec::with_capacity(t); + let mut paths = Vec::with_capacity(t); + + let ext_mat_cols = ext_mat.cols(); + + for i in indices { + queried_columns.push(ext_mat_cols[i].clone()); + paths.push( + col_tree + .generate_proof(i) + .map_err(|_| Error::TranscriptError)?, + ); + } + + Ok(LinCodePCProofSingle { + paths, + v, + columns: queried_columns, + }) +} diff --git a/src/linear_codes/multilinear_ligero/mod.rs b/src/linear_codes/multilinear_ligero/mod.rs new file mode 100644 index 00000000..ed0c4ab1 --- /dev/null +++ b/src/linear_codes/multilinear_ligero/mod.rs @@ -0,0 +1,90 @@ +use super::{ + utils::{reed_solomon, tensor_vec}, + LigeroPCParams, LinearEncode, +}; + +use ark_crypto_primitives::{ + crh::{CRHScheme, TwoToOneCRHScheme}, + merkle_tree::Config, + sponge::CryptographicSponge, +}; +use ark_ff::{FftField, PrimeField}; +use ark_poly::{MultilinearExtension, Polynomial}; +use ark_std::log2; +use ark_std::marker::PhantomData; +use ark_std::vec::Vec; + +mod tests; + +/// The multilinear Ligero polynomial commitment scheme based on [[Ligero]][ligero]. 
+/// The scheme defaults to the naive batching strategy.
+///
+/// Note: The scheme currently does not support hiding.
+///
+/// [ligero]: https://eprint.iacr.org/2022/1608.pdf
+pub struct MultilinearLigero<
+    F: PrimeField,
+    C: Config,
+    S: CryptographicSponge,
+    P: MultilinearExtension<F>,
+    H: CRHScheme,
+> {
+    _phantom: PhantomData<(F, C, S, P, H)>,
+}
+
+impl<F, C, S, P, H> LinearEncode<F, C, P, H> for MultilinearLigero<F, C, S, P, H>
+where
+    F: PrimeField + FftField,
+    C: Config,
+    S: CryptographicSponge,
+    P: MultilinearExtension<F>,
+    <P as Polynomial<F>>::Point: Into<Vec<F>>,
+    H: CRHScheme,
+{
+    type LinCodePCParams = LigeroPCParams<F, C, H>;
+
+    fn setup<R: RngCore>(
+        _max_degree: usize,
+        _num_vars: Option<usize>,
+        _rng: &mut R,
+        leaf_hash_params: <<C as Config>::LeafHash as CRHScheme>::Parameters,
+        two_to_one_params: <<C as Config>::TwoToOneHash as TwoToOneCRHScheme>::Parameters,
+        col_hash_params: H::Parameters,
+    ) -> Self::LinCodePCParams {
+        Self::LinCodePCParams::new(
+            128,
+            2,
+            true,
+            leaf_hash_params,
+            two_to_one_params,
+            col_hash_params,
+        )
+    }
+
+    fn encode(msg: &[F], param: &Self::LinCodePCParams) -> Vec<F> {
+        reed_solomon(msg, param.rho_inv)
+    }
+
+    fn poly_to_vec(polynomial: &P) -> Vec<F> {
+        polynomial.to_evaluations()
+    }
+
+    fn point_to_vec(point: <P as Polynomial<F>>::Point) -> Vec<F> {
+        point.into()
+    }
+
+    /// For a multilinear polynomial in n + m variables, it returns a tuple for k = {n, m}:
+    /// ((1-z_1)*(1-z_2)*...*(1-z_k), z_1*(1-z_2)*...*(1-z_k), ..., z_1*z_2*...*z_k)
+    fn tensor(
+        point: &<P as Polynomial<F>
>::Point, + left_len: usize, + _right_len: usize, + ) -> (Vec, Vec) { + let point: Vec = Self::point_to_vec(point.clone()); + + let split = log2(left_len) as usize; + let left = &point[..split]; + let right = &point[split..]; + (tensor_vec(left), tensor_vec(right)) + } +} diff --git a/src/linear_codes/multilinear_ligero/tests.rs b/src/linear_codes/multilinear_ligero/tests.rs new file mode 100644 index 00000000..e8017d7e --- /dev/null +++ b/src/linear_codes/multilinear_ligero/tests.rs @@ -0,0 +1,294 @@ +#[cfg(test)] +mod tests { + + use crate::linear_codes::LinearCodePCS; + use crate::utils::test_sponge; + use crate::{ + challenge::ChallengeGenerator, + linear_codes::{utils::*, LigeroPCParams, MultilinearLigero, PolynomialCommitment}, + LabeledPolynomial, + }; + use ark_bls12_377::Fq; + use ark_bls12_377::Fr; + use ark_bls12_381::Fr as Fr381; + use ark_crypto_primitives::{ + crh::{sha256::Sha256, CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{ByteDigestConverter, Config}, + sponge::poseidon::PoseidonSponge, + }; + use ark_ff::{Field, PrimeField}; + use ark_poly::evaluations::multivariate::{MultilinearExtension, SparseMultilinearExtension}; + use ark_std::test_rng; + use blake2::Blake2s256; + use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + + type LeafH = LeafIdentityHasher; + type CompressH = Sha256; + type ColHasher = FieldToBytesColHasher; + + struct MerkleTreeParams; + + impl Config for MerkleTreeParams { + type Leaf = Vec; + + type LeafDigest = ::Output; + type LeafInnerDigestConverter = ByteDigestConverter; + type InnerDigest = ::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; + } + + type MTConfig = MerkleTreeParams; + type Sponge = PoseidonSponge; + + type LigeroPCS = LinearCodePCS< + MultilinearLigero< + F, + MTConfig, + Sponge, + SparseMultilinearExtension, + ColHasher, + >, + F, + SparseMultilinearExtension, + Sponge, + MTConfig, + ColHasher, + >; + + fn rand_poly( + _: usize, + num_vars: Option, + rng: &mut ChaCha20Rng, + ) -> SparseMultilinearExtension { + match num_vars { + Some(n) => SparseMultilinearExtension::rand(n, rng), + None => unimplemented!(), // should not happen in ML case! + } + } + + fn constant_poly( + _: usize, + num_vars: Option, + rng: &mut ChaCha20Rng, + ) -> SparseMultilinearExtension { + // f1 = (1-x1)(1-x2)(1-x3)(1-x5)[(1-x6)*x4 + 2(1-x4)*x6] + match num_vars { + Some(n) => { + let points = vec![(1, Fr::rand(rng))]; + SparseMultilinearExtension::from_evaluations(n, &points) + } + None => unimplemented!(), // should not happen in ML case! 
+ } + } + + #[test] + fn test_construction() { + let mut rng = &mut test_rng(); + let num_vars = 10; + // just to make sure we have the right degree given the FFT domain for our field + let leaf_hash_params = ::setup(&mut rng).unwrap(); + let two_to_one_params = ::setup(&mut rng) + .unwrap() + .clone(); + let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); + let check_well_formedness = true; + + let pp: LigeroPCParams> = LigeroPCParams::new( + 128, + 4, + check_well_formedness, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ); + + let (ck, vk) = LigeroPCS::::trim(&pp, 0, 0, None).unwrap(); + + let rand_chacha = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let labeled_poly = LabeledPolynomial::new( + "test".to_string(), + rand_poly(1, Some(num_vars), rand_chacha), + Some(num_vars), + Some(num_vars), + ); + + let mut test_sponge = test_sponge::(); + let (c, rands) = LigeroPCS::::commit(&ck, &[labeled_poly.clone()], None).unwrap(); + + let point = rand_point(Some(num_vars), rand_chacha); + + let value = labeled_poly.evaluate(&point); + + let mut challenge_generator: ChallengeGenerator> = + ChallengeGenerator::new_univariate(&mut test_sponge); + + let proof = LigeroPCS::::open( + &ck, + &[labeled_poly], + &c, + &point, + &mut (challenge_generator.clone()), + &rands, + None, + ) + .unwrap(); + assert!(LigeroPCS::::check( + &vk, + &c, + &point, + [value], + &proof, + &mut challenge_generator, + None + ) + .unwrap()); + } + + #[test] + fn test_calculate_t_with_good_parameters() { + assert!(calculate_t::(128, (3, 4), 2_usize.pow(32)).unwrap() < 200); + assert!(calculate_t::(256, (3, 4), 2_usize.pow(32)).unwrap() < 400); + } + + #[test] + fn test_calculate_t_with_bad_parameters() { + calculate_t::( + (Fq::MODULUS_BIT_SIZE - 60) as usize, + (3, 4), + 2_usize.pow(60), + ) + .unwrap_err(); + calculate_t::(400, (3, 4), 2_usize.pow(32)).unwrap_err(); + } + + fn rand_point(num_vars: Option, rng: &mut ChaCha20Rng) -> Vec { + match num_vars { + Some(n) => (0..n).map(|_| F::rand(rng)).collect(), + None => unimplemented!(), // should not happen! 
+ } + } + + #[test] + fn single_poly_test() { + use crate::tests::*; + single_poly_test::<_, _, LigeroPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, LigeroPCS, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn constant_poly_test() { + use crate::tests::*; + single_poly_test::<_, _, LigeroPCS, _>( + Some(10), + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, LigeroPCS, _>( + Some(5), + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn full_end_to_end_test() { + use crate::tests::*; + full_end_to_end_test::<_, _, LigeroPCS, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + full_end_to_end_test::<_, _, LigeroPCS, _>( + Some(3), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn single_equation_test() { + use crate::tests::*; + single_equation_test::<_, _, LigeroPCS, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + single_equation_test::<_, _, LigeroPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn two_equation_test() { + use crate::tests::*; + two_equation_test::<_, _, LigeroPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + two_equation_test::<_, _, LigeroPCS, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn full_end_to_end_equation_test() { + use crate::tests::*; + full_end_to_end_equation_test::<_, _, LigeroPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + full_end_to_end_equation_test::<_, _, LigeroPCS, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } +} diff --git a/src/linear_codes/univariate_ligero/mod.rs b/src/linear_codes/univariate_ligero/mod.rs new file mode 100644 index 00000000..973a5c30 --- /dev/null +++ b/src/linear_codes/univariate_ligero/mod.rs @@ -0,0 +1,90 @@ +use super::utils::reed_solomon; +use super::{LigeroPCParams, LinearEncode}; + +use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; +use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_ff::PrimeField; +use ark_poly::DenseUVPolynomial; +use ark_std::marker::PhantomData; +use ark_std::vec::Vec; + +mod tests; + +/// The univariate Ligero polynomial commitment scheme based on [[Ligero]][ligero]. +/// The scheme defaults to the naive batching strategy. +/// +/// Note: The scheme currently does not support hiding. 
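+///
+/// With the coefficients laid out row-first into an n x m matrix M (so that
+/// M_{i,j} = p_{i*m + j}), evaluation factors as p(z) = b^T.M.a, where
+/// a = (1, z, ..., z^{m-1}) and b = (1, z^m, ..., z^{(n-1)m}); see `tensor` below.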
+/// +/// [ligero]: https://eprint.iacr.org/2022/1608.pdf +pub struct UnivariateLigero< + F: PrimeField, + C: Config, + S: CryptographicSponge, + P: DenseUVPolynomial, + H: CRHScheme, +> { + _phantom: PhantomData<(F, C, S, P, H)>, +} + +impl LinearEncode for UnivariateLigero +where + F: PrimeField, + C: Config, + S: CryptographicSponge, + P: DenseUVPolynomial, + P::Point: Into, + H: CRHScheme, +{ + type LinCodePCParams = LigeroPCParams; + + fn setup( + _max_degree: usize, + _num_vars: Option, + _rng: &mut R, + leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, + two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + col_hash_params: H::Parameters, + ) -> Self::LinCodePCParams { + Self::LinCodePCParams::new( + 128, + 4, + true, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ) + } + + fn encode(msg: &[F], param: &Self::LinCodePCParams) -> Vec { + reed_solomon(msg, param.rho_inv) + } + + /// For a univariate polynomial, we simply return the list of coefficients. + fn poly_to_vec(polynomial: &P) -> Vec { + polynomial.coeffs().to_vec() + } + + fn point_to_vec(point: P::Point) -> Vec { + vec![point] + } + + /// For a univariate polynomial it returns a tuple: + /// ((1, z, z^2, ..., z^n), (1, z^n, z^(2n), ..., z^((m-1)n))) + fn tensor(z: &F, left: usize, right: usize) -> (Vec, Vec) { + let mut left_out = Vec::with_capacity(left); + let mut pow_a = F::one(); + for _ in 0..left { + left_out.push(pow_a); + pow_a *= z; + } + + let mut right_out = Vec::with_capacity(right); + let mut pow_b = F::one(); + for _ in 0..right { + right_out.push(pow_b); + pow_b *= pow_a; + } + + (left_out, right_out) + } +} diff --git a/src/linear_codes/univariate_ligero/tests.rs b/src/linear_codes/univariate_ligero/tests.rs new file mode 100644 index 00000000..b0fb67c8 --- /dev/null +++ b/src/linear_codes/univariate_ligero/tests.rs @@ -0,0 +1,403 @@ +#[cfg(test)] +mod tests { + + use crate::ark_std::UniformRand; + use crate::linear_codes::LinearCodePCS; + use crate::utils::test_sponge; + use crate::{ + challenge::ChallengeGenerator, + linear_codes::{utils::*, LigeroPCParams, PolynomialCommitment, UnivariateLigero}, + LabeledPolynomial, + }; + use ark_bls12_377::Fq; + use ark_bls12_377::Fr; + use ark_bls12_381::Fr as Fr381; + use ark_crypto_primitives::{ + crh::{sha256::Sha256, CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{ByteDigestConverter, Config}, + sponge::poseidon::PoseidonSponge, + }; + use ark_ff::{Field, PrimeField}; + use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; + use ark_std::test_rng; + use blake2::Blake2s256; + use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + + type LeafH = LeafIdentityHasher; + type CompressH = Sha256; + type ColHasher = FieldToBytesColHasher; + + struct MerkleTreeParams; + + impl Config for MerkleTreeParams { + type Leaf = Vec; + + type LeafDigest = ::Output; + type LeafInnerDigestConverter = ByteDigestConverter; + type InnerDigest = ::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; + } + + type MTConfig = MerkleTreeParams; + type Sponge = PoseidonSponge; + + type LigeroPCS = LinearCodePCS< + UnivariateLigero, ColHasher>, + Fr, + DensePolynomial, + Sponge, + MTConfig, + ColHasher, + >; + + type LigeroPcsF = LinearCodePCS< + UnivariateLigero, ColHasher>, + F, + DensePolynomial, + Sponge, + MTConfig, + ColHasher, + >; + + fn rand_poly( + degree: usize, + _: Option, + rng: &mut ChaCha20Rng, + ) -> DensePolynomial { + DensePolynomial::rand(degree, rng) + } + + fn constant_poly( + _: usize, + _: Option, + rng: 
&mut ChaCha20Rng, + ) -> DensePolynomial { + DensePolynomial::from_coefficients_slice(&[Fr::rand(rng)]) + } + + #[test] + fn test_construction() { + let degree = 4; + let mut rng = &mut test_rng(); + // just to make sure we have the right degree given the FFT domain for our field + let leaf_hash_params = ::setup(&mut rng).unwrap(); + let two_to_one_params = ::setup(&mut rng) + .unwrap() + .clone(); + let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); + let check_well_formedness = true; + + let pp: LigeroPCParams> = LigeroPCParams::new( + 128, + 4, + check_well_formedness, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ); + + let (ck, vk) = LigeroPCS::trim(&pp, 0, 0, None).unwrap(); + + let rand_chacha = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let labeled_poly = LabeledPolynomial::new( + "test".to_string(), + rand_poly(degree, None, rand_chacha), + None, + None, + ); + + let mut test_sponge = test_sponge::(); + let (c, rands) = LigeroPCS::commit(&ck, &[labeled_poly.clone()], None).unwrap(); + + let point = Fr::rand(rand_chacha); + + let value = labeled_poly.evaluate(&point); + + let mut challenge_generator: ChallengeGenerator> = + ChallengeGenerator::new_univariate(&mut test_sponge); + + let proof = LigeroPCS::open( + &ck, + &[labeled_poly], + &c, + &point, + &mut (challenge_generator.clone()), + &rands, + None, + ) + .unwrap(); + assert!(LigeroPCS::check( + &vk, + &c, + &point, + [value], + &proof, + &mut challenge_generator, + None + ) + .unwrap()); + } + + #[test] + fn test_calculate_t_with_good_parameters() { + assert!(calculate_t::(128, (3, 4), 2_usize.pow(32)).unwrap() < 200); + assert!(calculate_t::(256, (3, 4), 2_usize.pow(32)).unwrap() < 400); + } + + #[test] + fn test_calculate_t_with_bad_parameters() { + calculate_t::( + (Fq::MODULUS_BIT_SIZE - 60) as usize, + (3, 4), + 2_usize.pow(60), + ) + .unwrap_err(); + calculate_t::(400, (3, 4), 2_usize.pow(32)).unwrap_err(); + } + + fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> F { + F::rand(rng) + } + + #[test] + fn single_poly_test() { + use crate::tests::*; + single_poly_test::<_, _, LigeroPCS, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, LigeroPcsF, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn constant_poly_test() { + use crate::tests::*; + single_poly_test::<_, _, LigeroPCS, _>( + None, + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, LigeroPcsF, _>( + None, + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn quadratic_poly_degree_bound_multiple_queries_test() { + use crate::tests::*; + quadratic_poly_degree_bound_multiple_queries_test::<_, _, LigeroPCS, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + quadratic_poly_degree_bound_multiple_queries_test::<_, _, LigeroPcsF, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn linear_poly_degree_bound_test() { + use crate::tests::*; + linear_poly_degree_bound_test::<_, _, LigeroPCS, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + linear_poly_degree_bound_test::<_, _, LigeroPcsF, _>( + rand_poly::, + 
rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn single_poly_degree_bound_test() { + use crate::tests::*; + single_poly_degree_bound_test::<_, _, LigeroPCS, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_degree_bound_test::<_, _, LigeroPcsF, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn single_poly_degree_bound_multiple_queries_test() { + use crate::tests::*; + single_poly_degree_bound_multiple_queries_test::<_, _, LigeroPCS, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_degree_bound_multiple_queries_test::<_, _, LigeroPcsF, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn two_polys_degree_bound_single_query_test() { + use crate::tests::*; + two_polys_degree_bound_single_query_test::<_, _, LigeroPCS, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + two_polys_degree_bound_single_query_test::<_, _, LigeroPcsF, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn full_end_to_end_test() { + use crate::tests::*; + full_end_to_end_test::<_, _, LigeroPCS, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + full_end_to_end_test::<_, _, LigeroPcsF, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn single_equation_test() { + use crate::tests::*; + single_equation_test::<_, _, LigeroPCS, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + single_equation_test::<_, _, LigeroPcsF, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn two_equation_test() { + use crate::tests::*; + two_equation_test::<_, _, LigeroPCS, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + two_equation_test::<_, _, LigeroPcsF, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn two_equation_degree_bound_test() { + use crate::tests::*; + two_equation_degree_bound_test::<_, _, LigeroPCS, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + two_equation_degree_bound_test::<_, _, LigeroPcsF, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn full_end_to_end_equation_test() { + use crate::tests::*; + full_end_to_end_equation_test::<_, _, LigeroPCS, _>( + None, + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + full_end_to_end_equation_test::<_, _, LigeroPcsF, _>( + None, + rand_poly::, + rand_point::, + 
poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + #[should_panic] + fn bad_degree_bound_test() { + use crate::tests::*; + use ark_bls12_381::Fq as Fq381; + bad_degree_bound_test::<_, _, LigeroPcsF, _>( + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + } +} diff --git a/src/linear_codes/utils.rs b/src/linear_codes/utils.rs new file mode 100644 index 00000000..99c7a068 --- /dev/null +++ b/src/linear_codes/utils.rs @@ -0,0 +1,246 @@ +use core::borrow::Borrow; + +use crate::utils::IOPTranscript; +use crate::{utils::ceil_div, Error}; + +use ark_crypto_primitives::{crh::CRHScheme, merkle_tree::Config}; +use ark_ff::{FftField, PrimeField}; + +use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; +use ark_std::string::ToString; +use ark_std::vec::Vec; + +#[cfg(not(feature = "std"))] +use num_traits::Float; + +#[cfg(any(feature = "benches", test))] +use { + crate::to_bytes, + ark_serialize::CanonicalSerialize, + ark_std::{marker::PhantomData, rand::RngCore}, + digest::Digest, +}; + +/// Apply reed-solomon encoding to msg. +/// Assumes msg.len() is equal to the order of some FFT domain in F. +/// Returns a vector of length equal to the smallest FFT domain of size at least msg.len() * RHO_INV. +pub(crate) fn reed_solomon( + // msg, of length m, is interpreted as a vector of coefficients of a polynomial of degree m - 1 + msg: &[F], + rho_inv: usize, +) -> Vec { + let m = msg.len(); + + let extended_domain = GeneralEvaluationDomain::::new(m * rho_inv).unwrap_or_else(|| { + panic!( + "The field F cannot accomodate FFT for msg.len() * RHO_INV = {} elements (too many)", + m * rho_inv + ) + }); + + extended_domain.fft(msg) +} + +#[inline] +pub(crate) fn get_num_bytes(n: usize) -> usize { + ceil_div((usize::BITS - n.leading_zeros()) as usize, 8) +} + +#[inline] +pub(crate) fn hash_column(array: Vec, params: &H::Parameters) -> Result +where + F: PrimeField, + C: Config, + H: CRHScheme, + Vec: Borrow<::Input>, + C::Leaf: Sized, + H::Output: Into, +{ + H::evaluate(params, array) + .map_err(|_| Error::HashingError) + .map(|x| x.into()) +} + +/// Generate `t` (not necessarily distinct) random points in `[0, n)` +/// using the current state of the `transcript`. +pub(crate) fn get_indices_from_transcript( + n: usize, + t: usize, + transcript: &mut IOPTranscript, +) -> Result, Error> { + let bytes_to_squeeze = get_num_bytes(n); + let mut indices = Vec::with_capacity(t); + for _ in 0..t { + let mut bytes: Vec = vec![0; bytes_to_squeeze]; + transcript + .get_and_append_byte_challenge(b"i", &mut bytes) + .map_err(|_| Error::TranscriptError)?; + + // get the usize from Vec: + let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); + // modulo the number of columns in the encoded matrix + indices.push(ind % n); + } + Ok(indices) +} + +#[inline] +pub(crate) fn calculate_t( + sec_param: usize, + distance: (usize, usize), + codeword_len: usize, +) -> Result { + // Took from the analysis by BCI+20 and Ligero + // We will find the smallest $t$ such that + // $(1-\delta)^t + (\rho+\delta)^t + \frac{n}{F} < 2^{-\lambda}$. + // With $\delta = \frac{1-\rho}{2}$, the expreesion is + // $2 * (\frac{1+\rho}{2})^t + \frac{n}{F} < 2^(-\lambda)$. 
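+    // Rearranging and solving for the smallest such $t$ gives
+    // $t = \lceil (\log_2(2^{-\lambda} - \frac{n}{F}) - 1) / \log_2(\frac{1+\rho}{2}) \rceil$,
+    // which is what is computed below, capped at the codeword length.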
+ + let field_bits = F::MODULUS_BIT_SIZE as i32; + let sec_param = sec_param as i32; + + let residual = codeword_len as f64 / 2.0_f64.powi(field_bits); + let rhs = (2.0_f64.powi(-sec_param) - residual).log2(); + if !(rhs.is_normal()) { + return Err(Error::InvalidParameters("For the given codeword length and the required security guarantee, the field is not big enough.".to_string())); + } + let nom = rhs - 1.0; + let denom = (1.0 - 0.5 * distance.0 as f64 / distance.1 as f64).log2(); + if !(denom.is_normal()) { + return Err(Error::InvalidParameters( + "The distance is wrong".to_string(), + )); + } + let t = (nom / denom).ceil() as usize; + Ok(if t < codeword_len { t } else { codeword_len }) +} + +/// Only needed for benches and tests. +#[cfg(any(feature = "benches", test))] +pub struct LeafIdentityHasher; + +#[cfg(any(feature = "benches", test))] +impl CRHScheme for LeafIdentityHasher { + type Input = Vec; + type Output = Vec; + type Parameters = (); + + fn setup(_: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _: &Self::Parameters, + input: T, + ) -> Result { + Ok(input.borrow().to_vec().into()) + } +} + +/// Only needed for benches and tests. +#[cfg(any(feature = "benches", test))] +pub struct FieldToBytesColHasher +where + F: PrimeField + CanonicalSerialize, + D: Digest, +{ + _phantom: PhantomData<(F, D)>, +} + +#[cfg(any(feature = "benches", test))] +impl CRHScheme for FieldToBytesColHasher +where + F: PrimeField + CanonicalSerialize, + D: Digest, +{ + type Input = Vec; + type Output = Vec; + type Parameters = (); + + fn setup(_rng: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _parameters: &Self::Parameters, + input: T, + ) -> Result { + let mut dig = D::new(); + dig.update(to_bytes!(input.borrow()).unwrap()); + Ok(dig.finalize().to_vec()) + } +} + +pub(crate) fn tensor_vec(values: &[F]) -> Vec { + let one = F::one(); + let anti_values: Vec = values.iter().map(|v| one - *v).collect(); + + let mut layer: Vec = vec![one]; + + for i in 0..values.len() { + let mut new_layer = Vec::new(); + for v in &layer { + new_layer.push(*v * anti_values[i]); + } + for v in &layer { + new_layer.push(*v * values[i]); + } + layer = new_layer; + } + + layer +} + +#[cfg(test)] +pub(crate) mod tests { + + use super::*; + use ark_bls12_377::Fr; + use ark_poly::{ + domain::general::GeneralEvaluationDomain, univariate::DensePolynomial, DenseUVPolynomial, + Polynomial, + }; + use ark_std::test_rng; + use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + + #[test] + fn test_reed_solomon() { + let rho_inv = 3; + // `i` is the min number of evaluations we need to interpolate a poly of degree `i - 1` + for i in 1..10 { + let deg = (1 << i) - 1; + + let rand_chacha = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let mut pol = DensePolynomial::rand(deg, rand_chacha); + + while pol.degree() != deg { + pol = DensePolynomial::rand(deg, rand_chacha); + } + + let coeffs = &pol.coeffs; + + // size of evals might be larger than deg + 1 (the min. 
number of evals needed to interpolate): we could still do R-S encoding on smaller evals, but the resulting polynomial will differ, so for this test to work we should pass it in full + let m = deg + 1; + + let encoded = reed_solomon(&coeffs, rho_inv); + + let large_domain = GeneralEvaluationDomain::::new(m * rho_inv).unwrap(); + + // the encoded elements should agree with the evaluations of the polynomial in the larger domain + for j in 0..(rho_inv * m) { + assert_eq!(pol.evaluate(&large_domain.element(j)), encoded[j]); + } + } + } + + #[test] + fn test_get_num_bytes() { + assert_eq!(get_num_bytes(0), 0); + assert_eq!(get_num_bytes(1), 1); + assert_eq!(get_num_bytes(9), 1); + assert_eq!(get_num_bytes(1 << 11), 2); + assert_eq!(get_num_bytes(1 << 32 - 1), 4); + assert_eq!(get_num_bytes(1 << 32), 5); + assert_eq!(get_num_bytes(1 << 32 + 1), 5); + } +} diff --git a/src/streaming_kzg/data_structures.rs b/src/streaming_kzg/data_structures.rs index 5923a40e..7adaf005 100644 --- a/src/streaming_kzg/data_structures.rs +++ b/src/streaming_kzg/data_structures.rs @@ -2,9 +2,10 @@ use ark_ff::Field; use ark_std::borrow::Borrow; use ark_std::vec::Vec; -use crate::streaming_kzg::ceil_div; use ark_std::iterable::Iterable; +use crate::utils::ceil_div; + /// A `Streamer` folding a vector of coefficients /// with the given challenges, and producing a stream of items /// `(i, v)` where `i` indicates the depth, and `v` is the next coefficient. diff --git a/src/streaming_kzg/mod.rs b/src/streaming_kzg/mod.rs index e3bdb2af..8fd494e2 100644 --- a/src/streaming_kzg/mod.rs +++ b/src/streaming_kzg/mod.rs @@ -284,12 +284,6 @@ pub(crate) fn vanishing_polynomial(points: &[F]) -> DensePolynomial .fold(one, |x, y| x.naive_mul(&y)) } -/// Return ceil(x / y). -pub(crate) fn ceil_div(x: usize, y: usize) -> usize { - // XXX. warning: this expression can overflow. - (x + y - 1) / y -} - /// Compute a linear combination of the polynomials `polynomials` with the given challenges. pub(crate) fn linear_combination( polynomials: &[PP], diff --git a/src/streaming_kzg/space.rs b/src/streaming_kzg/space.rs index ab50adfd..cc1d36d2 100644 --- a/src/streaming_kzg/space.rs +++ b/src/streaming_kzg/space.rs @@ -6,7 +6,8 @@ use ark_std::borrow::Borrow; use ark_std::collections::VecDeque; use ark_std::vec::Vec; -use crate::streaming_kzg::{ceil_div, vanishing_polynomial, FoldedPolynomialTree}; +use crate::streaming_kzg::{vanishing_polynomial, FoldedPolynomialTree}; +use crate::utils::ceil_div; use ark_ec::scalar_mul::variable_base::{ChunkedPippenger, HashMapPippenger, VariableBaseMSM}; use ark_std::iterable::{Iterable, Reverse}; diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 00000000..6a819bf0 --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,343 @@ +use core::marker::PhantomData; + +#[cfg(not(feature = "std"))] +use num_traits::Float; + +#[cfg(feature = "parallel")] +use rayon::{ + iter::{IntoParallelRefIterator, ParallelIterator}, + prelude::IndexedParallelIterator, +}; + +use ark_ff::{Field, PrimeField}; +use ark_serialize::CanonicalSerialize; +use ark_std::vec::Vec; +use merlin::Transcript; + +use crate::Error; + +/// Takes as input a struct, and converts them to a series of bytes. All traits +/// that implement `CanonicalSerialize` can be automatically converted to bytes +/// in this manner. +/// From jellyfish lib +#[macro_export] +macro_rules! 
to_bytes { + ($x:expr) => {{ + let mut buf = ark_std::vec![]; + ark_serialize::CanonicalSerialize::serialize_compressed($x, &mut buf).map(|_| buf) + }}; +} + +/// Return ceil(x / y). +pub(crate) fn ceil_div(x: usize, y: usize) -> usize { + // XXX. warning: this expression can overflow. + (x + y - 1) / y +} + +#[derive(Debug)] +pub struct Matrix { + pub(crate) n: usize, + pub(crate) m: usize, + entries: Vec>, +} + +impl Matrix { + /// Returns a Matrix of dimensions n x m given a list of n * m field elements. + /// The list should be ordered row-first, i.e. [a11, ..., a1m, a21, ..., a2m, ...]. + /// + /// # Panics + /// Panics if the dimensions do not match the length of the list + pub(crate) fn new_from_flat(n: usize, m: usize, entry_list: &[F]) -> Self { + assert_eq!( + entry_list.len(), + n * m, + "Invalid matrix construction: dimensions are {} x {} but entry vector has {} entries", + n, + m, + entry_list.len() + ); + + // TODO more efficient to run linearly? + let entries: Vec> = (0..n) + .map(|row| (0..m).map(|col| entry_list[m * row + col]).collect()) + .collect(); + + Self { n, m, entries } + } + + /// Returns a Matrix given a list of its rows, each in turn represented as a list of field elements. + /// + /// # Panics + /// Panics if the sub-lists do not all have the same length. + pub(crate) fn new_from_rows(row_list: Vec>) -> Self { + let m = row_list[0].len(); + + for row in row_list.iter().skip(1) { + assert_eq!( + row.len(), + m, + "Invalid matrix construction: not all rows have the same length" + ); + } + + Self { + n: row_list.len(), + m, + entries: row_list, + } + } + + /// Returns the entry in position (i, j). **Indexing starts at 0 in both coordinates**, + /// i.e. the first element is in position (0, 0) and the last one in (n - 1, j - 1), + /// where n and m are the number of rows and columns, respectively. + /// + /// Index bound checks are waived for efficiency and behaviour under invalid indexing is undefined + #[cfg(test)] + pub(crate) fn entry(&self, i: usize, j: usize) -> F { + self.entries[i][j] + } + + /// Returns self as a list of rows + pub(crate) fn rows(&self) -> Vec> { + self.entries.clone() + } + + /// Returns self as a list of columns + pub(crate) fn cols(&self) -> Vec> { + (0..self.m) + .map(|col| (0..self.n).map(|row| self.entries[row][col]).collect()) + .collect() + } + + /// Returns the product v * self, where v is interpreted as a row vector. In other words, + /// it returns a linear combination of the rows of self with coefficients given by v. + /// + /// Panics if the length of v is different from the number of rows of self. + pub(crate) fn row_mul(&self, v: &[F]) -> Vec { + assert_eq!( + v.len(), + self.n, + "Invalid row multiplication: vector has {} elements whereas each matrix column has {}", + v.len(), + self.n + ); + + (0..self.m) + .map(|col| { + inner_product( + v, + &(0..self.n) + .map(|row| self.entries[row][col]) + .collect::>(), + ) + }) + .collect() + } +} + +#[inline] +pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { + ark_std::cfg_iter!(v1) + .zip(v2) + .map(|(li, ri)| *li * ri) + .sum() +} + +/// The following struct is taken from jellyfish repository. Once they change +/// their dependency on `crypto-primitive`, we use their crate instead of +/// a copy-paste. We needed the newer `crypto-primitive` for serializing. 
+#[derive(Clone)] +pub(crate) struct IOPTranscript { + transcript: Transcript, + is_empty: bool, + #[doc(hidden)] + phantom: PhantomData, +} + +// TODO: merge this with jf_plonk::transcript +impl IOPTranscript { + /// Create a new IOP transcript. + pub(crate) fn new(label: &'static [u8]) -> Self { + Self { + transcript: Transcript::new(label), + is_empty: true, + phantom: PhantomData, + } + } + + /// Append the message to the transcript. + pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { + self.transcript.append_message(label, msg); + self.is_empty = false; + Ok(()) + } + + /// Append the message to the transcript. + pub(crate) fn append_serializable_element( + &mut self, + label: &'static [u8], + group_elem: &S, + ) -> Result<(), Error> { + self.append_message( + label, + &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, + ) + } + + /// Generate the challenge from the current transcript + /// and append it to the transcript. + /// + /// The output field element is statistical uniform as long + /// as the field has a size less than 2^384. + pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { + // we need to reject when transcript is empty + if self.is_empty { + return Err(Error::TranscriptError); + } + + let mut buf = [0u8; 64]; + self.transcript.challenge_bytes(label, &mut buf); + let challenge = F::from_le_bytes_mod_order(&buf); + self.append_serializable_element(label, &challenge)?; + Ok(challenge) + } + + /// Generate the challenge from the current transcript + /// and append it to the transcript. + /// + /// Without exposing the internal field `transcript`, + /// this is a wrapper around getting bytes as opposed to field elements. + pub(crate) fn get_and_append_byte_challenge( + &mut self, + label: &'static [u8], + dest: &mut [u8], + ) -> Result<(), Error> { + // we need to reject when transcript is empty + if self.is_empty { + return Err(Error::TranscriptError); + } + + self.transcript.challenge_bytes(label, dest); + self.append_message(label, dest)?; + Ok(()) + } +} + +#[inline] +#[cfg(test)] +pub(crate) fn to_field(v: Vec) -> Vec { + v.iter().map(|x| F::from(*x)).collect::>() +} + +// TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. 
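+// Note: `test_sponge` below builds a 3-wide Poseidon sponge from randomly sampled round
+// constants; it is meant for tests only and is not a vetted Poseidon instantiation.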
+#[cfg(test)]
+use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
+
+#[cfg(test)]
+pub(crate) fn test_sponge<F: PrimeField>() -> PoseidonSponge<F> {
+    use ark_crypto_primitives::sponge::{poseidon::PoseidonConfig, CryptographicSponge};
+    use ark_std::test_rng;
+
+    let full_rounds = 8;
+    let partial_rounds = 31;
+    let alpha = 17;
+
+    let mds = vec![
+        vec![F::one(), F::zero(), F::one()],
+        vec![F::one(), F::one(), F::zero()],
+        vec![F::zero(), F::one(), F::one()],
+    ];
+
+    let mut v = Vec::new();
+    let mut ark_rng = test_rng();
+
+    for _ in 0..(full_rounds + partial_rounds) {
+        let mut res = Vec::new();
+
+        for _ in 0..3 {
+            res.push(F::rand(&mut ark_rng));
+        }
+        v.push(res);
+    }
+    let config = PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, v, 2, 1);
+    PoseidonSponge::new(&config)
+}
+
+#[cfg(test)]
+pub(crate) mod tests {
+
+    use super::*;
+
+    use ark_bls12_377::Fr;
+
+    #[test]
+    fn test_matrix_constructor_flat() {
+        let entries: Vec<Fr> = to_field(vec![10, 100, 4, 67, 44, 50]);
+        let mat = Matrix::new_from_flat(2, 3, &entries);
+        assert_eq!(mat.entry(1, 2), Fr::from(50));
+    }
+
+    #[test]
+    fn test_matrix_constructor_flat_square() {
+        let entries: Vec<Fr> = to_field(vec![10, 100, 4, 67]);
+        let mat = Matrix::new_from_flat(2, 2, &entries);
+        assert_eq!(mat.entry(1, 1), Fr::from(67));
+    }
+
+    #[test]
+    #[should_panic(expected = "dimensions are 2 x 3 but entry vector has 5 entries")]
+    fn test_matrix_constructor_flat_panic() {
+        let entries: Vec<Fr> = to_field(vec![10, 100, 4, 67, 44]);
+        Matrix::new_from_flat(2, 3, &entries);
+    }
+
+    #[test]
+    fn test_matrix_constructor_rows() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![10, 100, 4]),
+            to_field(vec![23, 1, 0]),
+            to_field(vec![55, 58, 9]),
+        ];
+        let mat = Matrix::new_from_rows(rows);
+        assert_eq!(mat.entry(2, 0), Fr::from(55));
+    }
+
+    #[test]
+    #[should_panic(expected = "not all rows have the same length")]
+    fn test_matrix_constructor_rows_panic() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![10, 100, 4]),
+            to_field(vec![23, 1, 0]),
+            to_field(vec![55, 58]),
+        ];
+        Matrix::new_from_rows(rows);
+    }
+
+    #[test]
+    fn test_cols() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![4, 76]),
+            to_field(vec![14, 92]),
+            to_field(vec![17, 89]),
+        ];
+
+        let mat = Matrix::new_from_rows(rows);
+
+        assert_eq!(mat.cols()[1], to_field(vec![76, 92, 89]));
+    }
+
+    #[test]
+    fn test_row_mul() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![10, 100, 4]),
+            to_field(vec![23, 1, 0]),
+            to_field(vec![55, 58, 9]),
+        ];
+
+        let mat = Matrix::new_from_rows(rows);
+        let v: Vec<Fr> = to_field(vec![12, 41, 55]);
+        // by giving the result in the integers and then converting to Fr
+        // we ensure the test will still pass even if Fr changes
+        assert_eq!(mat.row_mul(&v), to_field::<Fr>(vec![4088, 4431, 543]));
+    }
+}

From 9bbd74ebf96f5d229aebde2bcaa184993ac69c4c Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Wed, 25 Oct 2023 12:14:03 +0200
Subject: [PATCH 03/75] Add Brakedown

---
 Cargo.toml                                    |  33 +-
 README.md                                     |   5 +
 src/error.rs                                  |  22 +
 src/lib.rs                                    |   9 +
 src/linear_codes/brakedown.rs                 | 351 ++++++++++
 src/linear_codes/data_structures.rs           | 155 +++++
 src/linear_codes/mod.rs                       | 618 ++++++++++++++++++
 src/linear_codes/multilinear_brakedown/mod.rs | 127 ++++
 .../multilinear_brakedown/tests.rs            | 294 +++++++++
 src/linear_codes/utils.rs                     | 308 +++++++++
 src/streaming_kzg/data_structures.rs          |   3 +-
 src/streaming_kzg/mod.rs                      |   6 -
 src/streaming_kzg/space.rs                    |   3 +-
 src/utils.rs                                  | 359 ++++++++++
 14 files changed, 2273 insertions(+), 20 deletions(-)
 create mode 100644 src/linear_codes/brakedown.rs
 create mode
100644 src/linear_codes/data_structures.rs create mode 100644 src/linear_codes/mod.rs create mode 100644 src/linear_codes/multilinear_brakedown/mod.rs create mode 100644 src/linear_codes/multilinear_brakedown/tests.rs create mode 100644 src/linear_codes/utils.rs create mode 100644 src/utils.rs diff --git a/Cargo.toml b/Cargo.toml index 4b58457e..ee9bf24a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,24 +15,27 @@ ark-serialize = { version = "^0.4.0", default-features = false, features = [ "de ark-ff = { version = "^0.4.0", default-features = false } ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } -ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge"] } +ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = ["sponge","merkle_tree" ] } ark-std = { version = "^0.4.0", default-features = false } +blake2 = { version = "0.10", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.13", default-features = false, optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } +merlin = { version = "3.0.0", default-features = false } [dev-dependencies] ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } -blake2 = { version = "0.10", default-features = false } +ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } rand_chacha = { version = "0.3.0", default-features = false } + [profile.release] opt-level = 3 lto = "thin" @@ -46,18 +49,24 @@ incremental = true debug = true [features] -default = [ "std", "parallel" ] -std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] -r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] -print-trace = [ "ark-std/print-trace" ] -parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon" ] +default = ["std", "parallel"] +std = ["ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] +r1cs = ["ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] +print-trace = ["ark-std/print-trace"] +parallel = ["std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon"] + +[target.'cfg(target_arch = "aarch64")'.dependencies] +num-traits = { version = "0.2", default-features = false, features = ["libm"] } [patch.crates-io] -ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } -ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } -ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } +ark-ff = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} +ark-ec = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} +ark-poly = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} +ark-serialize = { git = 
"https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} + ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } -ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } \ No newline at end of file +ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } diff --git a/README.md b/README.md index 7a4d582c..d562a26d 100644 --- a/README.md +++ b/README.md @@ -181,6 +181,7 @@ Unless you explicitly state otherwise, any contribution that you submit to this [aurora-light]: https://ia.cr/2019/601 [pcd-acc]: https://ia.cr/2020/499 [pst]: https://ia.cr/2011/587 +[brakedown]: https://ia.cr/2021/1043 ## Reference papers @@ -208,6 +209,10 @@ TCC 2020 Charalampos Papamanthou, Elaine Shi, Roberto Tamassia TCC 2013 +[Brakedown: Linear-time and field-agnostic SNARKs for R1CS][brakedown] +Alexander Golovnev, Jonathan Lee, Srinath Setty, Justin Thaler, Riad S. Wahby +CRYPTO 2023 + ## Acknowledgements This work was supported by: an Engineering and Physical Sciences Research Council grant; a Google Faculty Award; the RISELab at UC Berkeley; and donations from the Ethereum Foundation and the Interchain Foundation. diff --git a/src/error.rs b/src/error.rs index de7091eb..2b133964 100644 --- a/src/error.rs +++ b/src/error.rs @@ -93,6 +93,23 @@ pub enum Error { /// Index of the offending polynomial. label: String, }, + + /// This means a failure in verifying the commitment or the opening. + InvalidCommitment, + + /// For PCS which rely on Fiat-Shamir to be rendered non-interactive, + /// these are errors that result from incorrect transcript manipulation. + TranscriptError, + + /// This means the required soundness error bound is inherently impossible. + /// E.g., the field is not big enough. + InvalidParameters(String), + + /// Error resulting from hashing in linear code - based PCS. + HashingError, + + /// Shows that encoding is not feasible + EncodingError, } impl core::fmt::Display for Error { @@ -179,6 +196,11 @@ impl core::fmt::Display for Error { support up to degree ({:?})", label, poly_degree, supported_degree ), Error::IncorrectInputLength(err) => write!(f, "{}", err), + Error::InvalidCommitment => write!(f, "Failed to verify the commitment"), + Error::TranscriptError => write!(f, "Incorrect transcript manipulation"), + Error::InvalidParameters(err) => write!(f, "{}", err), + Error::HashingError => write!(f, "Error resulting from hashing"), + Error::EncodingError => write!(f, "Encoding failed"), } } } diff --git a/src/lib.rs b/src/lib.rs index fe417e94..1645f9f6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -34,6 +34,9 @@ use ark_std::{ pub mod data_structures; pub use data_structures::*; +/// Useful functions +pub(crate) mod utils; + /// R1CS constraints for polynomial constraints. #[cfg(feature = "r1cs")] mod constraints; @@ -126,6 +129,12 @@ pub use marlin::marlin_pst13_pc; /// [bdfg]: https://eprint.iacr.org/2020/081.pdf pub mod streaming_kzg; +/// Schemes based on the Ligero construction in [[Ligero]][ligero]. +/// +/// [ligero]: https://eprint.iacr.org/2022/1608 +/// [brakedown]: https://eprint.iacr.org/2021/1043.pdf +pub mod linear_codes; + /// `QuerySet` is the set of queries that are to be made to a set of labeled polynomials/equations /// `p` that have previously been committed to. 
Each element of a `QuerySet` is a pair of /// `(label, (point_label, point))`, where `label` is the label of a polynomial in `p`, diff --git a/src/linear_codes/brakedown.rs b/src/linear_codes/brakedown.rs new file mode 100644 index 00000000..625efb07 --- /dev/null +++ b/src/linear_codes/brakedown.rs @@ -0,0 +1,351 @@ +use super::utils::SprsMat; +use super::BrakedownPCParams; +use super::LinCodeParametersInfo; +use crate::linear_codes::utils::calculate_t; +use crate::utils::ceil_div; +use crate::utils::{ceil_mul, ent}; +use crate::{PCCommitterKey, PCUniversalParams, PCVerifierKey}; + +use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; +use ark_crypto_primitives::merkle_tree::{Config, LeafParam, TwoToOneParam}; +use ark_ff::PrimeField; +use ark_std::log2; +use ark_std::rand::RngCore; +use ark_std::vec::Vec; +#[cfg(not(feature = "std"))] +use num_traits::Float; + +impl PCUniversalParams for BrakedownPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn max_degree(&self) -> usize { + usize::MAX + } +} + +impl PCCommitterKey for BrakedownPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn max_degree(&self) -> usize { + usize::MAX + } + + fn supported_degree(&self) -> usize { + as PCCommitterKey>::max_degree(self) + } +} + +impl PCVerifierKey for BrakedownPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn max_degree(&self) -> usize { + usize::MAX + } + + fn supported_degree(&self) -> usize { + as PCVerifierKey>::max_degree(self) + } +} + +impl LinCodeParametersInfo for BrakedownPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + fn check_well_formedness(&self) -> bool { + self.check_well_formedness + } + + fn distance(&self) -> (usize, usize) { + (self.rho_inv.1 * self.beta.0, self.rho_inv.0 * self.beta.1) + } + + fn sec_param(&self) -> usize { + self.sec_param + } + + fn compute_dimensions(&self, _n: usize) -> (usize, usize) { + (self.n, self.m) + } + + fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters { + &self.leaf_hash_params + } + + fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { + &self.two_to_one_params + } + + fn col_hash_params(&self) -> &::Parameters { + &self.col_hash_params + } +} + +impl BrakedownPCParams +where + F: PrimeField, + C: Config, + H: CRHScheme, +{ + /// Create a default UniversalParams, with the values from Fig. 2 from the paper. + pub fn default( + rng: &mut R, + poly_len: usize, + check_well_formedness: bool, + leaf_hash_params: LeafParam, + two_to_one_params: TwoToOneParam, + col_hash_params: H::Parameters, + ) -> Self { + let sec_param = 128; + let a = (178, 1000); + let b = (61, 1000); + let r = (1521, 1000); + let base_len = 30; + let t = calculate_t::(sec_param, (b.0 * r.1, b.1 * r.0), poly_len).unwrap(); // we want to get a rough idea what t is + let n = 1 << log2((ceil_div(2 * poly_len, t) as f64).sqrt().ceil() as usize); + let m = ceil_div(poly_len, n); + let c = Self::cn_const(a, b); + let d = Self::dn_const(a, b, r); + let ct = Constants { a, b, r, c, d }; + let (a_dims, b_dims) = Self::mat_size(m, base_len, &ct); + let a_mats = Self::make_all(rng, &a_dims); + let b_mats = Self::make_all(rng, &b_dims); + + Self::new( + sec_param, + a, + b, + r, + base_len, + n, + m, + a_dims, + b_dims, + a_mats, + b_mats, + check_well_formedness, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ) + } + + /// This function creates a UniversalParams. It does not check if the paramters are consistent/correct. 
+ pub fn new( + sec_param: usize, + a: (usize, usize), + b: (usize, usize), + r: (usize, usize), + base_len: usize, + n: usize, + m: usize, + a_dims: Vec<(usize, usize, usize)>, + b_dims: Vec<(usize, usize, usize)>, + a_mats: Vec>, + b_mats: Vec>, + check_well_formedness: bool, + leaf_hash_params: LeafParam, + two_to_one_params: TwoToOneParam, + col_hash_params: H::Parameters, + ) -> Self { + let m_ext = if a_dims.is_empty() { + ceil_mul(m, r) + } else { + Self::codeword_len(&a_dims, &b_dims) + }; + let start = a_dims + .iter() + .scan(0, |acc, &(row, _, _)| { + *acc += row; + Some(*acc) + }) + .collect::>(); + let end = b_dims + .iter() + .scan(m_ext, |acc, &(_, col, _)| { + *acc -= col; + Some(*acc) + }) + .collect::>(); + + Self { + sec_param, + alpha: a, + beta: b, + rho_inv: r, + base_len, + n, + m, + m_ext, + a_dims, + b_dims, + start, + end, + a_mats, + b_mats, + check_well_formedness, + leaf_hash_params, + two_to_one_params, + col_hash_params, + } + } + /// mu = rho_inv - 1 - rho_inv * alpha + fn mu(a: (usize, usize), r: (usize, usize)) -> f64 { + let nom = r.0 * (a.1 - a.0) - r.1 * a.1; + let den = r.1 * a.1; + nom as f64 / den as f64 + } + /// nu = beta + alpha * beta + 0.03 + fn nu(a: (usize, usize), b: (usize, usize)) -> f64 { + let c = (3usize, 100usize); + let nom = b.0 * (a.1 + a.0) * c.1 + c.0 * b.1 * a.1; + let den = b.1 * a.1 * c.1; + nom as f64 / den as f64 + } + /// cn_const + fn cn_const(a: (usize, usize), b: (usize, usize)) -> (f64, f64) { + let a = div(a); + let b = div(b); + let arg = 1.28 * b / a; + let nom = ent(b) + a * ent(arg); + let den = -b * arg.log2(); + (nom, den) + } + /// cn + fn cn(n: usize, ct: &Constants) -> usize { + use ark_std::cmp::{max, min}; + let b = ct.b; + let c = ct.c; + min( + max(ceil_mul(n, (32 * b.0, 25 * b.1)), 4 + ceil_mul(n, b)), + ((110f64 / (n as f64) + c.0) / c.1).ceil() as usize, + ) + } + /// dn_const + fn dn_const(a: (usize, usize), b: (usize, usize), r: (usize, usize)) -> (f64, f64) { + let m = Self::mu(a, r); + let n = Self::nu(a, b); + let a = div(a); + let b = div(b); + let r = div(r); + let nm = n / m; + let nom = r * a * ent(b / r) + m * ent(nm); + let den = -a * b * nm.log2(); + (nom, den) + } + /// dn + fn dn(n: usize, ct: &Constants) -> usize { + use ark_std::cmp::min; + let b = ct.b; + let r = ct.r; + let d = ct.d; + min( + ceil_mul(n, (2 * b.0, b.1)) + + ((ceil_mul(n, r) - n + 110) as f64 / F::MODULUS_BIT_SIZE as f64).ceil() as usize, // 2 * beta * n + n * (r - 1 + 110/n) + ((110f64 / (n as f64) + d.0) / d.1).ceil() as usize, + ) + } + fn mat_size( + mut n: usize, + base_len: usize, + ct: &Constants, + ) -> (Vec<(usize, usize, usize)>, Vec<(usize, usize, usize)>) { + let mut a_dims: Vec<(usize, usize, usize)> = Vec::default(); + let a = ct.a; + let r = ct.r; + + while n >= base_len { + let m = ceil_mul(n, a); + let cn = Self::cn(n, ct); + let cn = if cn < m { cn } else { m }; // can't generate more nonzero entries than there are columns + a_dims.push((n, m, cn)); + n = m; + } + + let b_dims = a_dims + .iter() + .map(|&(an, am, _)| { + let n = ceil_mul(am, r); + let m = ceil_mul(an, r) - an - n; + let dn = Self::dn(n, ct); + let dn = if dn < m { dn } else { m }; // can't generate more nonzero entries than there are columns + (n, m, dn) + }) + .collect::>(); + (a_dims, b_dims) + } + + /// This function computes the codeword length + /// Notice that it assumes the input is bigger than base_len (i.e., a_dim is not empty) + pub(crate) fn codeword_len( + a_dims: &[(usize, usize, usize)], + b_dims: &[(usize, usize, usize)], 
+ ) -> usize { + b_dims.iter().map(|(_, col, _)| col).sum::() + // Output v of the recursive encoding + a_dims.iter().map(|(row, _, _)| row).sum::() + // Input x to the recursive encoding + b_dims.last().unwrap().0 // Output z of the last step of recursion + } + + /// Create a matrix with `n` rows and `m` columns and `d` non-zero entries in each row. + /// This function creates a list for entries of each columns and calls the constructor + /// from `SprsMat`. It leverages Fisher–Yates shuffle for choosing `d` indices in each + /// row. + fn make_mat(n: usize, m: usize, d: usize, rng: &mut R) -> SprsMat { + let mut tmp: Vec = (0..m).collect(); + let mut mat: Vec> = vec![vec![]; m]; + for i in 0..n { + // Fisher–Yates shuffle algorithm + let idxs = { + (0..d) + .map(|j| { + let r = rng.next_u64() as usize % (m - j); + tmp.swap(r, m - 1 - j); + tmp[m - 1 - j] + }) + .collect::>() + }; + // Sampling values for each non-zero entry + for j in idxs { + mat[j].push(( + i, + loop { + let r = F::rand(rng); + if r != F::zero() { + break r; + } + }, + )) + } + } + SprsMat::::new_from_columns(n, m, d, &mat) + } + + fn make_all(rng: &mut R, dims: &[(usize, usize, usize)]) -> Vec> { + dims.iter() + .map(|(n, m, d)| Self::make_mat(*n, *m, *d, rng)) + .collect::>() + } +} + +#[inline] +fn div(a: (usize, usize)) -> f64 { + a.0 as f64 / a.1 as f64 +} + +struct Constants { + a: (usize, usize), + b: (usize, usize), + r: (usize, usize), + c: (f64, f64), + d: (f64, f64), +} diff --git a/src/linear_codes/data_structures.rs b/src/linear_codes/data_structures.rs new file mode 100644 index 00000000..c176be71 --- /dev/null +++ b/src/linear_codes/data_structures.rs @@ -0,0 +1,155 @@ +use super::utils::SprsMat; +use crate::{ + PCCommitment, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness, PCVerifierKey, +}; +use ark_crypto_primitives::{ + crh::CRHScheme, + merkle_tree::{Config, LeafParam, Path, TwoToOneParam}, +}; +use ark_ff::PrimeField; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::rand::RngCore; +use ark_std::vec::Vec; + +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Clone(bound = ""), Debug(bound = ""))] +/// The public parameters for Brakedown PCS. +pub struct BrakedownPCParams { + /// The security parameter + pub(crate) sec_param: usize, + /// alpha in the paper + pub(crate) alpha: (usize, usize), + /// beta in the paper + pub(crate) beta: (usize, usize), + /// The inverse of the code rate. + pub(crate) rho_inv: (usize, usize), + /// Threshold of the base case to encode with RS + pub(crate) base_len: usize, + /// Length of each column in the matrix that represents the polynomials + pub(crate) n: usize, + /// Length of each row in the matrix that represents the polynomials + pub(crate) m: usize, + /// Length of each row in the matrix that represents the polynomials, **after encoding** + pub(crate) m_ext: usize, + /// Constarints on A matrices. `a_dims[i]` is `(n, m, c)`, where `n` is + /// the number of rows, `m` is the number of columns, `c` is the number of + /// non-zero elements in each row, for the matrix A in the `i`th step of + /// the encoding. + pub(crate) a_dims: Vec<(usize, usize, usize)>, + /// Same as `a_dims`, but for B matrices. + pub(crate) b_dims: Vec<(usize, usize, usize)>, + /// By having `a_dims` and `b_dims`, we compute a vector of indices that + /// specfies where is the beginning of the sub-chunk that we need to + /// encode during the recursive encoding. 
Notice that we do not recurse + /// in this implementation, instead we do it iteratively. + pub(crate) start: Vec, + /// Same as `start`, but stores the end index of those chunks. + pub(crate) end: Vec, + /// A vector of all A matrices we need for encoding. + pub(crate) a_mats: Vec>, + /// A vector of all B matrices we need for encoding. + pub(crate) b_mats: Vec>, + /// This is a flag which determines if the random linear combination is done. + pub(crate) check_well_formedness: bool, + /// Parameters for hash function of Merkle tree leaves + #[derivative(Debug = "ignore")] + pub(crate) leaf_hash_params: LeafParam, + /// Parameters for hash function of Merke tree combining two nodes into one + #[derivative(Debug = "ignore")] + pub(crate) two_to_one_params: TwoToOneParam, + // Parameters for obtaining leaf digest from leaf value. + #[derivative(Debug = "ignore")] + pub(crate) col_hash_params: H::Parameters, +} + +pub(crate) type LinCodePCPreparedVerifierKey = (); + +impl PCPreparedVerifierKey for LinCodePCPreparedVerifierKey { + fn prepare(_vk: &Unprepared) -> Self {} +} +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub(crate) struct Metadata { + pub(crate) n_rows: usize, + pub(crate) n_cols: usize, + pub(crate) n_ext_cols: usize, +} + +/// The commitment to a polynomial is a root of the merkle tree, +/// where each node is a hash of the column of the encoded coefficient matrix U. +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCCommitment { + // number of rows resp. columns of the square matrix containing the coefficients of the polynomial + pub(crate) metadata: Metadata, + pub(crate) root: C::InnerDigest, +} + +impl PCCommitment for LinCodePCCommitment { + fn empty() -> Self { + LinCodePCCommitment::default() + } + + fn has_degree_bound(&self) -> bool { + false + } +} + +pub(crate) type LinCodePCPreparedCommitment = LinCodePCCommitment; + +impl PCPreparedCommitment + for LinCodePCPreparedCommitment +{ + fn prepare(_cm: &Unprepared) -> Self { + LinCodePCPreparedCommitment::default() + } +} + +pub(crate) type LinCodePCRandomness = (); + +impl PCRandomness for LinCodePCRandomness { + fn empty() -> Self { + unimplemented!() + } + + fn rand( + _num_queries: usize, + _has_degree_bound: bool, + _num_vars: Option, + _rng: &mut R, + ) -> Self { + unimplemented!() + } +} + +/// Proof of an individual linear code well-formedness check or opening +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub(crate) struct LinCodePCProofSingle +where + F: PrimeField, + C: Config, +{ + /// For each of the indices in q, `paths` contains the path from the root of the merkle tree to the leaf + pub(crate) paths: Vec>, + + /// v, s.t. 
E(v) = w + pub(crate) v: Vec, + + pub(crate) columns: Vec>, +} + +/// The Proof type for linear code PCS, which amounts to an array of individual proofs +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCProof +where + F: PrimeField, + C: Config, +{ + pub(crate) opening: LinCodePCProofSingle, + pub(crate) well_formedness: Option>, +} + +// Multiple poly at one point +pub(crate) type LPCPArray = Vec>; diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs new file mode 100644 index 00000000..fa160620 --- /dev/null +++ b/src/linear_codes/mod.rs @@ -0,0 +1,618 @@ +use crate::utils::{inner_product, IOPTranscript, Matrix}; +use crate::{ + Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey, + PolynomialCommitment, +}; + +use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; +use ark_crypto_primitives::merkle_tree::MerkleTree; +use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_ff::PrimeField; +use ark_poly::Polynomial; +use ark_std::borrow::Borrow; +use ark_std::marker::PhantomData; +use ark_std::rand::RngCore; +use ark_std::string::ToString; +use ark_std::vec::Vec; + +mod utils; + +mod multilinear_brakedown; + +pub use multilinear_brakedown::MultilinearBrakedown; + +mod brakedown; +mod data_structures; +use data_structures::*; + +pub use data_structures::LinCodePCProof; +#[cfg(any(feature = "benches", test))] +pub use utils::{FieldToBytesColHasher, LeafIdentityHasher}; + +use utils::{calculate_t, get_indices_from_transcript, hash_column}; + +const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; + +/// For linear code PC schemes, the universal paramters, committer key +/// and verifier key are all the same. This trait abstracts the common +/// information contained in these. +pub trait LinCodeParametersInfo +where + C: Config, + H: CRHScheme, +{ + /// Get the security parameter. + fn sec_param(&self) -> usize; + + /// Get the distance of the code. + fn distance(&self) -> (usize, usize); + + /// See whether there should be a well-formedness check. + fn check_well_formedness(&self) -> bool; + + /// Compute the dimensions of the coefficient matrix. + fn compute_dimensions(&self, n: usize) -> (usize, usize); + + /// Get the hash parameters for obtaining leaf digest from leaf value. + fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters; + + /// Get the parameters for hashing nodes in the merkle tree. + fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; + + /// Get the parameters for hashing a vector of values, + /// representing a column of the coefficient matrix, into a leaf value. + fn col_hash_params(&self) -> &H::Parameters; +} + +/// A trait for linear codes. +pub trait LinearEncode +where + F: PrimeField, + C: Config, + H: CRHScheme, + P: Polynomial, +{ + /// For schemes like Brakedown and Ligero, PCCommiiterKey and + /// PCVerifierKey and PCUniversalParams are all the same. + type LinCodePCParams: PCUniversalParams + + PCCommitterKey + + PCVerifierKey + + LinCodeParametersInfo; + + /// Does a default setup for the PCS. 
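+    /// In the Brakedown instantiation (see `multilinear_brakedown`), this
+    /// samples the sparse encoding matrices using the parameter choices of
+    /// Fig. 2 of the paper (see `BrakedownPCParams::default`).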
+ fn setup( + max_degree: usize, + num_vars: Option, + rng: &mut R, + leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, + two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + col_hash_params: H::Parameters, + ) -> Self::LinCodePCParams; + + /// Encode a message, which is interpreted as a vector of coefficients + /// of a polynomial of degree m - 1. + fn encode(msg: &[F], param: &Self::LinCodePCParams) -> Result, Error>; + + /// Represent the polynomial as either coefficients, + /// in the univariate case, or evaluations over + /// the Boolean hypercube, in the multilinear case. + fn poly_to_vec(polynomial: &P) -> Vec; + + /// Represent the query point as a vector of Field elements. + fn point_to_vec(point: P::Point) -> Vec; + + /// Arrange the coefficients of the polynomial into a matrix, + /// and apply encoding to each row. + /// Returns the tuple (original_matrix, encoded_matrix). + fn compute_matrices(polynomial: &P, param: &Self::LinCodePCParams) -> (Matrix, Matrix) { + let mut coeffs = Self::poly_to_vec(polynomial); + + // 1. Computing the matrix dimensions. + let (n_rows, n_cols) = param.compute_dimensions(coeffs.len()); + + // padding the coefficient vector with zeroes + coeffs.resize(n_rows * n_cols, F::zero()); + + let mat = Matrix::new_from_flat(n_rows, n_cols, &coeffs); + + // 2. Apply encoding row-wise + let ext_mat = Matrix::new_from_rows( + mat.rows() + .iter() + .map(|r| Self::encode(r, param).unwrap()) // Since we just computed the dimension, the error does not happen + .collect(), + ); + + (mat, ext_mat) + } + + /// Tensor the query point z in the following sense: + /// For a polynomial p(X) represented by a matrix M + /// with n rows and m columns such that M_{i,j} = p_{i + n*j}, + /// we define the tensoring of `z`: (a, b) = tensor(z, n, m) such that: + /// p(z) = b^T.M.a + /// returns the evaluation of p at z. + fn tensor(z: &P::Point, n: usize, m: usize) -> (Vec, Vec); +} + +/// Any linear-code-based commitment scheme. +pub struct LinearCodePCS +where + F: PrimeField, + C: Config, + S: CryptographicSponge, + P: Polynomial, + H: CRHScheme, + L: LinearEncode, +{ + _phantom: PhantomData<(L, F, P, S, C, H)>, +} + +impl PolynomialCommitment for LinearCodePCS +where + L: LinearEncode, + F: PrimeField, + P: Polynomial, + S: CryptographicSponge, + C: Config + 'static, + Vec: Borrow<::Input>, + H::Output: Into, + C::Leaf: Sized + Clone + Default, + H: CRHScheme, +{ + type UniversalParams = L::LinCodePCParams; + + type CommitterKey = L::LinCodePCParams; + + type VerifierKey = L::LinCodePCParams; + + type PreparedVerifierKey = LinCodePCPreparedVerifierKey; + + type Commitment = LinCodePCCommitment; + + type PreparedCommitment = LinCodePCPreparedCommitment; + + type Randomness = LinCodePCRandomness; + + type Proof = LPCPArray; + + type BatchProof = Vec; + + type Error = Error; + + /// This is only a default setup with reasonable parameters. + /// To create your own public parameters (from which vk/ck can be derived by `trim`), + /// see the documentation for `BrakedownPCUniversalParams`. 
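+    ///
+    /// A sketch of a typical call, using the `BrakedownPCS<F>` alias defined in
+    /// the tests of this crate (illustrative, with hypothetical bindings):
+    /// `let pp = BrakedownPCS::<F>::setup(0, Some(num_vars), rng)?;` followed by
+    /// `let (ck, vk) = BrakedownPCS::<F>::trim(&pp, 0, 0, None)?;`.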
+ fn setup( + max_degree: usize, + num_vars: Option, + rng: &mut R, + ) -> Result { + let leaf_hash_params = ::setup(rng).unwrap(); + let two_to_one_params = ::setup(rng) + .unwrap() + .clone(); + let col_hash_params = ::setup(rng).unwrap(); + let pp = L::setup::( + max_degree, + num_vars, + rng, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ); + let real_max_degree = ::max_degree(&pp); + if max_degree > real_max_degree || real_max_degree == 0 { + return Err(Error::InvalidParameters(FIELD_SIZE_ERROR.to_string())); + } + Ok(pp) + } + + fn trim( + pp: &Self::UniversalParams, + _supported_degree: usize, + _supported_hiding_bound: usize, + _enforced_degree_bounds: Option<&[usize]>, + ) -> Result<(Self::CommitterKey, Self::VerifierKey), Self::Error> { + if ::max_degree(pp) == 0 { + return Err(Error::InvalidParameters(FIELD_SIZE_ERROR.to_string())); + } + Ok((pp.clone(), pp.clone())) + } + + fn commit<'a>( + ck: &Self::CommitterKey, + polynomials: impl IntoIterator>, + _rng: Option<&mut dyn RngCore>, + ) -> Result< + ( + Vec>, + Vec, + ), + Self::Error, + > + where + P: 'a, + { + let mut commitments = Vec::new(); + + for labeled_polynomial in polynomials.into_iter() { + let polynomial = labeled_polynomial.polynomial(); + + // 1. Arrange the coefficients of the polynomial into a matrix, + // and apply encoding to get `ext_mat`. + let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + + // 2. Create the Merkle tree from the hashes of each column. + let col_tree = create_merkle_tree::( + &ext_mat, + ck.leaf_hash_params(), + ck.two_to_one_params(), + ck.col_hash_params(), + )?; + + // 3. Obtain the MT root and add it to the transcript. + let root = col_tree.root(); + + let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); + + transcript + .append_serializable_element(b"root", &root) + .map_err(|_| Error::TranscriptError)?; + + let n_rows = mat.n; + let n_cols = mat.m; + let n_ext_cols = ext_mat.m; + + // 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata. + let commitment = LinCodePCCommitment { + metadata: Metadata { + n_rows, + n_cols, + n_ext_cols, + }, + root, + }; + + commitments.push(LabeledCommitment::new( + labeled_polynomial.label().clone(), + commitment, + None, + )); + } + let com_len = &commitments.len(); + Ok((commitments, vec![(); *com_len])) + } + + fn open<'a>( + ck: &Self::CommitterKey, + labeled_polynomials: impl IntoIterator>, + commitments: impl IntoIterator>, + point: &'a P::Point, + _challenge_generator: &mut crate::challenge::ChallengeGenerator, + _rands: impl IntoIterator, + _rng: Option<&mut dyn RngCore>, + ) -> Result + where + P: 'a, + Self::Randomness: 'a, + Self::Commitment: 'a, + { + let mut proof_array = LPCPArray::default(); + let labeled_commitments: Vec<&'a LabeledCommitment> = + commitments.into_iter().collect(); + let labeled_polynomials: Vec<&'a LabeledPolynomial> = + labeled_polynomials.into_iter().collect(); + + if labeled_commitments.len() != labeled_polynomials.len() { + return Err(Error::IncorrectInputLength(format!( + "Mismatched lengths: {} commitments, {} polynomials", + labeled_commitments.len(), + labeled_polynomials.len() + ))); + } + + for i in 0..labeled_polynomials.len() { + let polynomial = labeled_polynomials[i].polynomial(); + let commitment = labeled_commitments[i].commitment(); + let n_rows = commitment.metadata.n_rows; + let n_cols = commitment.metadata.n_cols; + let root = &commitment.root; + + // 1. 
Arrange the coefficients of the polynomial into a matrix, + // and apply encoding to get `ext_mat`. + let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + + // 2. Create the Merkle tree from the hashes of each column. + let col_tree = create_merkle_tree::( + &ext_mat, + ck.leaf_hash_params(), + ck.two_to_one_params(), + ck.col_hash_params(), + )?; + + // 3. Generate vector `b` to left-multiply the matrix. + let (_, b) = L::tensor(point, n_cols, n_rows); + + let mut transcript = IOPTranscript::new(b"transcript"); + transcript + .append_serializable_element(b"root", root) + .map_err(|_| Error::TranscriptError)?; + + // If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and append it to the transcript. + let well_formedness = if ck.check_well_formedness() { + let mut r = Vec::new(); + for _ in 0..n_rows { + r.push( + transcript + .get_and_append_challenge(b"r") + .map_err(|_| Error::TranscriptError)?, + ); + } + let v = mat.row_mul(&r); + + transcript + .append_serializable_element(b"v", &v) + .map_err(|_| Error::TranscriptError)?; + Some(v) + } else { + None + }; + + let point_vec = L::point_to_vec(point.clone()); + for element in point_vec.iter() { + transcript + .append_serializable_element(b"point", element) + .map_err(|_| Error::TranscriptError)?; + } + + proof_array.push(LinCodePCProof { + // Compute the opening proof and append b.M to the transcript. + opening: generate_proof( + ck.sec_param(), + ck.distance(), + &b, + &mat, + &ext_mat, + &col_tree, + &mut transcript, + )?, + well_formedness, + }); + } + + Ok(proof_array) + } + + fn check<'a>( + vk: &Self::VerifierKey, + commitments: impl IntoIterator>, + point: &'a P::Point, + values: impl IntoIterator, + proof_array: &Self::Proof, + _challenge_generator: &mut crate::challenge::ChallengeGenerator, + _rng: Option<&mut dyn RngCore>, + ) -> Result + where + Self::Commitment: 'a, + { + let labeled_commitments: Vec<&'a LabeledCommitment> = + commitments.into_iter().collect(); + let values: Vec = values.into_iter().collect(); + + if labeled_commitments.len() != proof_array.len() + || labeled_commitments.len() != values.len() + { + return Err(Error::IncorrectInputLength( + format!( + "Mismatched lengths: {} proofs were provided for {} commitments with {} claimed values",labeled_commitments.len(), proof_array.len(), values.len() + ) + )); + } + let leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters = + vk.leaf_hash_params(); + let two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = + vk.two_to_one_params(); + + for (i, labeled_commitment) in labeled_commitments.iter().enumerate() { + let commitment = labeled_commitment.commitment(); + let n_rows = commitment.metadata.n_rows; + let n_cols = commitment.metadata.n_cols; + let n_ext_cols = commitment.metadata.n_ext_cols; + let root = &commitment.root; + let t = calculate_t::(vk.sec_param(), vk.distance(), n_ext_cols)?; + + let mut transcript = IOPTranscript::new(b"transcript"); + transcript + .append_serializable_element(b"root", &commitment.root) + .map_err(|_| Error::TranscriptError)?; + + let out = if vk.check_well_formedness() { + if proof_array[i].well_formedness.is_none() { + return Err(Error::InvalidCommitment); + } + let tmp = &proof_array[i].well_formedness.as_ref(); + let well_formedness = tmp.unwrap(); + let mut r = Vec::with_capacity(n_rows); + for _ in 0..n_rows { + r.push( + transcript + .get_and_append_challenge(b"r") + .map_err(|_| Error::TranscriptError)?, + ); + } + // Upon sending `v` to the 
Verifier, add it to the sponge. The claim is that v = r.M.
+                transcript
+                    .append_serializable_element(b"v", well_formedness)
+                    .map_err(|_| Error::TranscriptError)?;
+
+                (Some(well_formedness), Some(r))
+            } else {
+                (None, None)
+            };
+
+            // 1. Seed the transcript with the point and the received vector
+            // TODO Consider removing the evaluation point from the transcript.
+            let point_vec = L::point_to_vec(point.clone());
+            for element in point_vec.iter() {
+                transcript
+                    .append_serializable_element(b"point", element)
+                    .map_err(|_| Error::TranscriptError)?;
+            }
+            transcript
+                .append_serializable_element(b"v", &proof_array[i].opening.v)
+                .map_err(|_| Error::TranscriptError)?;
+
+            // 2. Ask the random oracle for the `t` indices where the checks happen.
+            let indices = get_indices_from_transcript::<F>(n_ext_cols, t, &mut transcript)?;
+
+            // 3. Hash the received columns into leaf hashes.
+            let col_hashes: Vec<C::Leaf> = proof_array[i]
+                .opening
+                .columns
+                .iter()
+                .map(|c| hash_column::<F, C, H>(c.clone(), vk.col_hash_params()).unwrap())
+                .collect();
+
+            // 4. Verify the paths for each of the leaf hashes - this is only run once,
+            // even if we have a well-formedness check (i.e., we save sending and checking the columns).
+            // See "Concrete optimizations to the commitment scheme", p.12 of [Brakedown](https://eprint.iacr.org/2021/1043.pdf).
+            for (j, (leaf, q_j)) in col_hashes.iter().zip(indices.iter()).enumerate() {
+                let path = &proof_array[i].opening.paths[j];
+                if path.leaf_index != *q_j {
+                    return Err(Error::InvalidCommitment);
+                }
+
+                path.verify(leaf_hash_params, two_to_one_params, root, leaf.clone())
+                    .map_err(|_| Error::InvalidCommitment)?;
+            }
+
+            // Helper closure: checks if a.b = c.
+            let check_inner_product = |a, b, c| -> Result<(), Error> {
+                if inner_product(a, b) != c {
+                    return Err(Error::InvalidCommitment);
+                }
+
+                Ok(())
+            };
+
+            // 5. Compute the encoding w = E(v).
+            let w = L::encode(&proof_array[i].opening.v, vk)?;
+
+            // 6. Compute `a`, `b` to right- and left-multiply with the matrix `M`.
+            let (a, b) = L::tensor(point, n_cols, n_rows);
+
+            // 7. Probabilistic checks that whatever the prover sent
+            // matches what the verifier computes independently.
+            // Note: we sacrifice some code repetition in order not to repeat execution.
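+            // Concretely, for each sampled column index j the verifier checks
+            // <b, col_j> = E(b.M)_j against the encoding w = E(v) from step 5,
+            // and additionally <r, col_j> = E(r.M)_j when the well-formedness
+            // check is enabled.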
+ if let (Some(well_formedness), Some(r)) = out { + let w_well_formedness = L::encode(well_formedness, vk)?; + for (transcript_index, matrix_index) in indices.iter().enumerate() { + check_inner_product( + &r, + &proof_array[i].opening.columns[transcript_index], + w_well_formedness[*matrix_index], + )?; + check_inner_product( + &b, + &proof_array[i].opening.columns[transcript_index], + w[*matrix_index], + )?; + } + } else { + for (transcript_index, matrix_index) in indices.iter().enumerate() { + check_inner_product( + &b, + &proof_array[i].opening.columns[transcript_index], + w[*matrix_index], + )?; + } + } + + if inner_product(&proof_array[i].opening.v, &a) != values[i] { + eprintln!("Function check: claimed value in position {i} does not match the evaluation of the committed polynomial in the same position"); + return Ok(false); + } + } + + Ok(true) + } +} + +// TODO maybe this can go to utils +fn create_merkle_tree( + ext_mat: &Matrix, + leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters, + two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + col_hash_params: &H::Parameters, +) -> Result, Error> +where + F: PrimeField, + C: Config, + H: CRHScheme, + Vec: Borrow<::Input>, + H::Output: Into, + C::Leaf: Default + Clone, +{ + let mut col_hashes: Vec = Vec::new(); + let ext_mat_cols = ext_mat.cols(); + + for col in ext_mat_cols.into_iter() { + let col_digest = hash_column::(col, col_hash_params)?; + col_hashes.push(col_digest); + } + + // pad the column hashes with zeroes + let next_pow_of_two = col_hashes.len().next_power_of_two(); + col_hashes.resize(next_pow_of_two, ::default()); + + MerkleTree::::new(leaf_hash_params, two_to_one_params, col_hashes) + .map_err(|_| Error::HashingError) +} + +fn generate_proof( + sec_param: usize, + distance: (usize, usize), + b: &[F], + mat: &Matrix, + ext_mat: &Matrix, + col_tree: &MerkleTree, + transcript: &mut IOPTranscript, +) -> Result, Error> +where + F: PrimeField, + C: Config, +{ + let t = calculate_t::(sec_param, distance, ext_mat.m)?; + + // 1. left-multiply the matrix by `b`. + let v = mat.row_mul(b); + + transcript + .append_serializable_element(b"v", &v) + .map_err(|_| Error::TranscriptError)?; + + // 2. Generate t column indices to test the linear combination on. + let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?; + + // 3. Compute Merkle tree paths for the requested columns. 
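+    // Each path authenticates one opened column against the Merkle root held
+    // in the commitment, so the verifier can spot-check t columns without
+    // ever receiving the full encoded matrix.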
+ let mut queried_columns = Vec::with_capacity(t); + let mut paths = Vec::with_capacity(t); + + let ext_mat_cols = ext_mat.cols(); + + for i in indices { + queried_columns.push(ext_mat_cols[i].clone()); + paths.push( + col_tree + .generate_proof(i) + .map_err(|_| Error::TranscriptError)?, + ); + } + + Ok(LinCodePCProofSingle { + paths, + v, + columns: queried_columns, + }) +} diff --git a/src/linear_codes/multilinear_brakedown/mod.rs b/src/linear_codes/multilinear_brakedown/mod.rs new file mode 100644 index 00000000..7d195ae5 --- /dev/null +++ b/src/linear_codes/multilinear_brakedown/mod.rs @@ -0,0 +1,127 @@ +use crate::Error; + +use super::utils::tensor_vec; +use super::{BrakedownPCParams, LinearEncode}; +use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; +use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_ff::{Field, PrimeField}; +use ark_poly::{MultilinearExtension, Polynomial}; +use ark_std::log2; +use ark_std::marker::PhantomData; +use ark_std::rand::RngCore; +use ark_std::vec::Vec; + +mod tests; + +/// The univariate Brakedown polynomial commitment scheme based on [[Brakedown]][bd]. +/// The scheme defaults to the naive batching strategy. +/// +/// Note: The scheme currently does not support hiding. +/// +/// [bd]: https://eprint.iacr.org/2021/1043.pdf +pub struct MultilinearBrakedown< + F: PrimeField, + C: Config, + S: CryptographicSponge, + P: MultilinearExtension, + H: CRHScheme, +> { + _phantom: PhantomData<(F, C, S, P, H)>, +} + +impl LinearEncode for MultilinearBrakedown +where + F: PrimeField, + C: Config, + S: CryptographicSponge, + P: MultilinearExtension, +

+    <P as Polynomial<F>>::Point: Into<Vec<F>>,
+    H: CRHScheme,
+{
+    type LinCodePCParams = BrakedownPCParams<F, C, H>;
+
+    fn setup<R: RngCore>(
+        _max_degree: usize,
+        num_vars: Option<usize>,
+        rng: &mut R,
+        leaf_hash_params: <<C as Config>::LeafHash as CRHScheme>::Parameters,
+        two_to_one_params: <<C as Config>::TwoToOneHash as TwoToOneCRHScheme>::Parameters,
+        col_hash_params: H::Parameters,
+    ) -> Self::LinCodePCParams {
+        Self::LinCodePCParams::default(
+            rng,
+            1 << num_vars.unwrap(),
+            true,
+            leaf_hash_params,
+            two_to_one_params,
+            col_hash_params,
+        )
+    }
+
+    fn encode(msg: &[F], pp: &Self::LinCodePCParams) -> Result<Vec<F>, Error> {
+        if msg.len() != pp.m {
+            return Err(Error::EncodingError);
+        }
+        let cw_len = pp.m_ext;
+        let mut cw = Vec::with_capacity(cw_len);
+        cw.extend_from_slice(msg);
+
+        // Multiply by matrices A
+        for (i, &s) in pp.start.iter().enumerate() {
+            let mut src = pp.a_mats[i].row_mul(&cw[s - pp.a_dims[i].0..s]);
+            cw.append(&mut src);
+        }
+
+        // later we don't necessarily mutate in order, so we need the full vec now.
+        cw.resize(cw_len, F::zero());
+        // RS encode the last one
+        let rss = *pp.start.last().unwrap_or(&0);
+        let rsie = rss + pp.a_dims.last().unwrap_or(&(0, pp.m, 0)).1;
+        let rsoe = *pp.end.last().unwrap_or(&cw_len);
+        naive_reed_solomon(&mut cw, rss, rsie, rsoe);
+
+        // Come back
+        for (i, (&s, &e)) in pp.start.iter().zip(&pp.end).enumerate() {
+            let src = &pp.b_mats[i].row_mul(&cw[s..e]);
+            cw[e..e + pp.b_dims[i].1].copy_from_slice(src);
+        }
+        Ok(cw.to_vec())
+    }
+
+    fn poly_to_vec(polynomial: &P) -> Vec<F> {
+        polynomial.to_evaluations()
+    }
+
+    fn point_to_vec(point: <P as Polynomial<F>>::Point) -> Vec<F> {
+        point
+    }
+
+    /// For a multilinear polynomial in n+m variables it returns a tuple for k={n,m}:
+    /// ((1-z_1)*(1-z_2)*...*(1-z_k), z_1*(1-z_2)*...*(1-z_k), ..., z_1*z_2*...*z_k)
+    fn tensor(
+        point: &<P as Polynomial<F>
>::Point, + left_len: usize, + _right_len: usize, + ) -> (Vec, Vec) { + let point: Vec = Self::point_to_vec(point.clone()); + + let split = log2(left_len) as usize; + let left = &point[..split]; + let right = &point[split..]; + (tensor_vec(left), tensor_vec(right)) + } +} + +// This RS encoding is on points 1, ..., oe - s without relying on FFTs +fn naive_reed_solomon(cw: &mut [F], s: usize, ie: usize, oe: usize) { + let mut res = vec![F::zero(); oe - s]; + let mut x = F::one(); + for r in res.iter_mut() { + for j in (s..ie).rev() { + *r *= x; + *r += cw[j]; + } + x += F::one(); + } + cw[s..oe].copy_from_slice(&res); +} diff --git a/src/linear_codes/multilinear_brakedown/tests.rs b/src/linear_codes/multilinear_brakedown/tests.rs new file mode 100644 index 00000000..6e94ad2d --- /dev/null +++ b/src/linear_codes/multilinear_brakedown/tests.rs @@ -0,0 +1,294 @@ +#[cfg(test)] +mod tests { + + use crate::linear_codes::LinearCodePCS; + use crate::utils::test_sponge; + use crate::{ + challenge::ChallengeGenerator, + linear_codes::{utils::*, BrakedownPCParams, MultilinearBrakedown, PolynomialCommitment}, + LabeledPolynomial, + }; + use ark_bls12_377::Fq; + use ark_bls12_377::Fr; + use ark_bls12_381::Fr as Fr381; + use ark_crypto_primitives::{ + crh::{sha256::Sha256, CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{ByteDigestConverter, Config}, + sponge::poseidon::PoseidonSponge, + }; + use ark_ff::{Field, PrimeField}; + use ark_poly::evaluations::multivariate::{MultilinearExtension, SparseMultilinearExtension}; + use ark_std::test_rng; + use blake2::Blake2s256; + use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + + type LeafH = LeafIdentityHasher; + type CompressH = Sha256; + type ColHasher = FieldToBytesColHasher; + + struct MerkleTreeParams; + + impl Config for MerkleTreeParams { + type Leaf = Vec; + + type LeafDigest = ::Output; + type LeafInnerDigestConverter = ByteDigestConverter; + type InnerDigest = ::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; + } + + type MTConfig = MerkleTreeParams; + type Sponge = PoseidonSponge; + + type BrakedownPCS = LinearCodePCS< + MultilinearBrakedown< + F, + MTConfig, + Sponge, + SparseMultilinearExtension, + ColHasher, + >, + F, + SparseMultilinearExtension, + Sponge, + MTConfig, + ColHasher, + >; + + fn rand_poly( + _: usize, + num_vars: Option, + rng: &mut ChaCha20Rng, + ) -> SparseMultilinearExtension { + match num_vars { + Some(n) => SparseMultilinearExtension::rand(n, rng), + None => unimplemented!(), // should not happen in ML case! + } + } + + fn constant_poly( + _: usize, + num_vars: Option, + rng: &mut ChaCha20Rng, + ) -> SparseMultilinearExtension { + match num_vars { + Some(n) => { + let points = vec![(1, Fr::rand(rng))]; + SparseMultilinearExtension::from_evaluations(n, &points) + } + None => unimplemented!(), // should not happen in ML case! 
+ } + } + + #[test] + fn test_construction() { + let mut rng = &mut test_rng(); + let num_vars = 11; + // just to make sure we have the right degree given the FFT domain for our field + let leaf_hash_params = ::setup(&mut rng).unwrap(); + let two_to_one_params = ::setup(&mut rng) + .unwrap() + .clone(); + let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); + let check_well_formedness = true; + + let pp: BrakedownPCParams> = + BrakedownPCParams::default( + rng, + 1 << num_vars, + check_well_formedness, + leaf_hash_params, + two_to_one_params, + col_hash_params, + ); + + let (ck, vk) = BrakedownPCS::::trim(&pp, 0, 0, None).unwrap(); + + let rand_chacha = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let labeled_poly = LabeledPolynomial::new( + "test".to_string(), + rand_poly(1, Some(num_vars), rand_chacha), + Some(num_vars), + Some(num_vars), + ); + + let mut test_sponge = test_sponge::(); + let (c, rands) = BrakedownPCS::::commit(&ck, &[labeled_poly.clone()], None).unwrap(); + + let point = rand_point(Some(num_vars), rand_chacha); + + let value = labeled_poly.evaluate(&point); + + let mut challenge_generator: ChallengeGenerator> = + ChallengeGenerator::new_univariate(&mut test_sponge); + + let proof = BrakedownPCS::::open( + &ck, + &[labeled_poly], + &c, + &point, + &mut (challenge_generator.clone()), + &rands, + None, + ) + .unwrap(); + assert!(BrakedownPCS::::check( + &vk, + &c, + &point, + [value], + &proof, + &mut challenge_generator, + None + ) + .unwrap()); + } + + #[test] + fn test_calculate_t_with_good_parameters() { + assert!(calculate_t::(128, (3, 4), 2_usize.pow(32)).unwrap() < 200); + assert!(calculate_t::(256, (3, 4), 2_usize.pow(32)).unwrap() < 400); + } + + #[test] + fn test_calculate_t_with_bad_parameters() { + calculate_t::( + (Fq::MODULUS_BIT_SIZE - 60) as usize, + (3, 4), + 2_usize.pow(60), + ) + .unwrap_err(); + calculate_t::(400, (3, 4), 2_usize.pow(32)).unwrap_err(); + } + + fn rand_point(num_vars: Option, rng: &mut ChaCha20Rng) -> Vec { + match num_vars { + Some(n) => (0..n).map(|_| F::rand(rng)).collect(), + None => unimplemented!(), // should not happen! 
+ } + } + + #[test] + fn single_poly_test() { + use crate::tests::*; + single_poly_test::<_, _, BrakedownPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, BrakedownPCS, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn constant_poly_test() { + use crate::tests::*; + single_poly_test::<_, _, BrakedownPCS, _>( + Some(10), + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + single_poly_test::<_, _, BrakedownPCS, _>( + Some(5), + constant_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + } + + #[test] + fn full_end_to_end_test() { + use crate::tests::*; + full_end_to_end_test::<_, _, BrakedownPCS, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + full_end_to_end_test::<_, _, BrakedownPCS, _>( + Some(9), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn single_equation_test() { + use crate::tests::*; + single_equation_test::<_, _, BrakedownPCS, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + single_equation_test::<_, _, BrakedownPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn two_equation_test() { + use crate::tests::*; + two_equation_test::<_, _, BrakedownPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + two_equation_test::<_, _, BrakedownPCS, _>( + Some(10), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } + + #[test] + fn full_end_to_end_equation_test() { + use crate::tests::*; + full_end_to_end_equation_test::<_, _, BrakedownPCS, _>( + Some(5), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-377"); + println!("Finished bls12-377"); + full_end_to_end_equation_test::<_, _, BrakedownPCS, _>( + Some(8), + rand_poly::, + rand_point::, + poseidon_sponge_for_test, + ) + .expect("test failed for bls12-381"); + println!("Finished bls12-381"); + } +} diff --git a/src/linear_codes/utils.rs b/src/linear_codes/utils.rs new file mode 100644 index 00000000..e53df9a9 --- /dev/null +++ b/src/linear_codes/utils.rs @@ -0,0 +1,308 @@ +use core::borrow::Borrow; + +use crate::utils::IOPTranscript; +use crate::{utils::ceil_div, Error}; + +use ark_crypto_primitives::{crh::CRHScheme, merkle_tree::Config}; +use ark_ff::{Field, PrimeField}; + +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::string::ToString; +use ark_std::vec::Vec; + +#[cfg(not(feature = "std"))] +use num_traits::Float; + +#[cfg(any(feature = "benches", test))] +use { + crate::to_bytes, + ark_std::{marker::PhantomData, rand::RngCore}, + digest::Digest, +}; + +/// This is CSC format https://shorturl.at/fpL17 +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Clone(bound = ""), Debug(bound = ""))] +pub struct SprsMat { + 
/// Number of rows. + pub(crate) n: usize, + /// Number of columns. + pub(crate) m: usize, + /// Number of non-zero entries in each row. + pub(crate) d: usize, + /// Numbers of non-zero elements in each columns. + ind_ptr: Vec, + /// The indices in each columns where exists a non-zero element. + col_ind: Vec, + // The values of non-zero entries. + val: Vec, +} + +impl SprsMat { + /// Calulates v.M + pub(crate) fn row_mul(&self, v: &[F]) -> Vec { + (0..self.m) + .map(|j| { + let ij = self.ind_ptr[j]..self.ind_ptr[j + 1]; + self.col_ind[ij.clone()] + .iter() + .zip(&self.val[ij]) + .map(|(&idx, x)| v[idx] * x) + .sum::() + }) + .collect::>() + } + /// Create a new `SprsMat` from list of elements that represents the + /// matrix in column major order. `n` is the number of rows, `m` is + /// the number of columns, and `d` is NNZ in each row. + pub fn new_from_flat(n: usize, m: usize, d: usize, list: &[F]) -> Self { + let nnz = d * n; + let mut ind_ptr = vec![0; m + 1]; + let mut col_ind = Vec::::with_capacity(nnz); + let mut val = Vec::::with_capacity(nnz); + assert!(list.len() == m * n, "The dimension is incorrect."); + for i in 0..m { + for (c, &v) in list[i * n..(i + 1) * n].iter().enumerate() { + if v != F::zero() { + ind_ptr[i + 1] += 1; + col_ind.push(c); + val.push(v); + } + } + ind_ptr[i + 1] += ind_ptr[i]; + } + assert!(ind_ptr[m] <= nnz, "The dimension or NNZ is incorrect."); + Self { + n, + m, + d, + ind_ptr, + col_ind, + val, + } + } + pub fn new_from_columns(n: usize, m: usize, d: usize, list: &[Vec<(usize, F)>]) -> Self { + let nnz = d * n; + let mut ind_ptr = vec![0; m + 1]; + let mut col_ind = Vec::::with_capacity(nnz); + let mut val = Vec::::with_capacity(nnz); + assert!(list.len() == m, "The dimension is incorrect."); + for j in 0..m { + for (i, v) in list[j].iter() { + ind_ptr[j + 1] += 1; + col_ind.push(*i); + val.push(*v); + } + assert!(list[j].len() <= n, "The dimension is incorrect."); + ind_ptr[j + 1] += ind_ptr[j]; + } + assert!(ind_ptr[m] <= nnz, "The dimension or NNZ is incorrect."); + Self { + n, + m, + d, + ind_ptr, + col_ind, + val, + } + } +} + +#[inline] +pub(crate) fn get_num_bytes(n: usize) -> usize { + ceil_div((usize::BITS - n.leading_zeros()) as usize, 8) +} + +#[inline] +pub(crate) fn hash_column(array: Vec, params: &H::Parameters) -> Result +where + F: PrimeField, + C: Config, + H: CRHScheme, + Vec: Borrow<::Input>, + C::Leaf: Sized, + H::Output: Into, +{ + H::evaluate(params, array) + .map_err(|_| Error::HashingError) + .map(|x| x.into()) +} + +/// Generate `t` (not necessarily distinct) random points in `[0, n)` +/// using the current state of the `transcript`. +pub(crate) fn get_indices_from_transcript( + n: usize, + t: usize, + transcript: &mut IOPTranscript, +) -> Result, Error> { + let bytes_to_squeeze = get_num_bytes(n); + let mut indices = Vec::with_capacity(t); + for _ in 0..t { + let mut bytes: Vec = vec![0; bytes_to_squeeze]; + transcript + .get_and_append_byte_challenge(b"i", &mut bytes) + .map_err(|_| Error::TranscriptError)?; + + // get the usize from Vec: + let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); + // modulo the number of columns in the encoded matrix + indices.push(ind % n); + } + Ok(indices) +} + +#[inline] +pub(crate) fn calculate_t( + sec_param: usize, + distance: (usize, usize), + codeword_len: usize, +) -> Result { + // Took from the analysis by BCI+20 and Ligero + // We will find the smallest $t$ such that + // $(1-\delta)^t + (\rho+\delta)^t + \frac{n}{F} < 2^{-\lambda}$. 
+#[inline]
+pub(crate) fn get_num_bytes(n: usize) -> usize {
+    ceil_div((usize::BITS - n.leading_zeros()) as usize, 8)
+}
+
+#[inline]
+pub(crate) fn hash_column<F, C, H>(array: Vec<F>, params: &H::Parameters) -> Result<C::Leaf, Error>
+where
+    F: PrimeField,
+    C: Config,
+    H: CRHScheme,
+    Vec<F>: Borrow<<H as CRHScheme>::Input>,
+    C::Leaf: Sized,
+    H::Output: Into<C::Leaf>,
+{
+    H::evaluate(params, array)
+        .map_err(|_| Error::HashingError)
+        .map(|x| x.into())
+}
+
+/// Generate `t` (not necessarily distinct) random points in `[0, n)`
+/// using the current state of the `transcript`.
+pub(crate) fn get_indices_from_transcript<F: PrimeField>(
+    n: usize,
+    t: usize,
+    transcript: &mut IOPTranscript<F>,
+) -> Result<Vec<usize>, Error> {
+    let bytes_to_squeeze = get_num_bytes(n);
+    let mut indices = Vec::with_capacity(t);
+    for _ in 0..t {
+        let mut bytes: Vec<u8> = vec![0; bytes_to_squeeze];
+        transcript
+            .get_and_append_byte_challenge(b"i", &mut bytes)
+            .map_err(|_| Error::TranscriptError)?;
+
+        // Build a usize from the squeezed bytes:
+        let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize);
+        // and reduce it modulo the number of columns in the encoded matrix.
+        indices.push(ind % n);
+    }
+    Ok(indices)
+}
+
+#[inline]
+pub(crate) fn calculate_t<F: PrimeField>(
+    sec_param: usize,
+    distance: (usize, usize),
+    codeword_len: usize,
+) -> Result<usize, Error> {
+    // Taken from the analysis in BCI+20 and Ligero.
+    // We will find the smallest $t$ such that
+    // $(1-\delta)^t + (\rho+\delta)^t + \frac{n}{F} < 2^{-\lambda}$.
+    // With $\delta = \frac{1-\rho}{2}$, the expression becomes
+    // $2 \cdot \left(\frac{1+\rho}{2}\right)^t + \frac{n}{F} < 2^{-\lambda}$.
+
+    let field_bits = F::MODULUS_BIT_SIZE as i32;
+    let sec_param = sec_param as i32;
+
+    let residual = codeword_len as f64 / 2.0_f64.powi(field_bits);
+    let rhs = (2.0_f64.powi(-sec_param) - residual).log2();
+    if !(rhs.is_normal()) {
+        return Err(Error::InvalidParameters("For the given codeword length and the required security guarantee, the field is not big enough.".to_string()));
+    }
+    let nom = rhs - 1.0;
+    let denom = (1.0 - 0.5 * distance.0 as f64 / distance.1 as f64).log2();
+    if !(denom.is_normal()) {
+        return Err(Error::InvalidParameters(
+            "The distance is wrong".to_string(),
+        ));
+    }
+    let t = (nom / denom).ceil() as usize;
+    Ok(if t < codeword_len { t } else { codeword_len })
+}
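A rough worked example of this bound (editorial, with assumed numbers; cf. the `test_calculate_t_with_good_parameters` test added in a later patch): take sec_param = 128, distance = (3, 4) — so the per-query factor is 1 - 0.5 * 3/4 = 5/8 — a ~255-bit field and codeword_len = 2^32. Then residual ≈ 2^-223 is negligible, rhs ≈ -128, nom ≈ -129, denom = log2(5/8) ≈ -0.678, giving t = ceil(190.3) = 191 column openings, consistent with the `< 200` assertion in those tests.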
+/// Only needed for benches and tests.
+#[cfg(any(feature = "benches", test))]
+pub struct LeafIdentityHasher;
+
+#[cfg(any(feature = "benches", test))]
+impl CRHScheme for LeafIdentityHasher {
+    type Input = Vec<u8>;
+    type Output = Vec<u8>;
+    type Parameters = ();
+
+    fn setup<R: RngCore>(_: &mut R) -> Result<Self::Parameters, ark_crypto_primitives::Error> {
+        Ok(())
+    }
+
+    fn evaluate<T: Borrow<Self::Input>>(
+        _: &Self::Parameters,
+        input: T,
+    ) -> Result<Self::Output, ark_crypto_primitives::Error> {
+        Ok(input.borrow().to_vec().into())
+    }
+}
+
+/// Only needed for benches and tests.
+#[cfg(any(feature = "benches", test))]
+pub struct FieldToBytesColHasher<F, D>
+where
+    F: PrimeField + CanonicalSerialize,
+    D: Digest,
+{
+    _phantom: PhantomData<(F, D)>,
+}
+
+#[cfg(any(feature = "benches", test))]
+impl<F, D> CRHScheme for FieldToBytesColHasher<F, D>
+where
+    F: PrimeField + CanonicalSerialize,
+    D: Digest,
+{
+    type Input = Vec<F>;
+    type Output = Vec<u8>;
+    type Parameters = ();
+
+    fn setup<R: RngCore>(_rng: &mut R) -> Result<Self::Parameters, ark_crypto_primitives::Error> {
+        Ok(())
+    }
+
+    fn evaluate<T: Borrow<Self::Input>>(
+        _parameters: &Self::Parameters,
+        input: T,
+    ) -> Result<Self::Output, ark_crypto_primitives::Error> {
+        let mut dig = D::new();
+        dig.update(to_bytes!(input.borrow()).unwrap());
+        Ok(dig.finalize().to_vec())
+    }
+}
+
+/// Computes the tensor expansion of `values`: the vector of all products
+/// obtained by choosing, for each i, either `values[i]` or `1 - values[i]`.
+pub(crate) fn tensor_vec<F: PrimeField>(values: &[F]) -> Vec<F> {
+    let one = F::one();
+    let anti_values: Vec<F> = values.iter().map(|v| one - *v).collect();
+
+    let mut layer: Vec<F> = vec![one];
+
+    for i in 0..values.len() {
+        let mut new_layer = Vec::new();
+        for v in &layer {
+            new_layer.push(*v * anti_values[i]);
+        }
+        for v in &layer {
+            new_layer.push(*v * values[i]);
+        }
+        layer = new_layer;
+    }
+
+    layer
+}
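For intuition (an editorial sketch, not part of the patch): on input `&[x, y]`, `tensor_vec` expands layer by layer as

    // [1]
    // [(1 - x), x]
    // [(1 - x)(1 - y), x(1 - y), (1 - x)y, x*y]

so entry b of the output, reading b as a binary string with bit i (little-endian) matched to `values[i]`, is the product that picks `values[i]` where the bit is set and `1 - values[i]` elsewhere.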
to_bytes { + ($x:expr) => {{ + let mut buf = ark_std::vec![]; + ark_serialize::CanonicalSerialize::serialize_compressed($x, &mut buf).map(|_| buf) + }}; +} + +/// Entropy function +pub(crate) fn ent(x: f64) -> f64 { + assert!(0f64 <= x && x <= 1f64); + if x == 0f64 || x == 1f64 { + 0f64 + } else { + -x * x.log2() - (1.0 - x) * (1.0 - x).log2() + } +} + +/// ceil of a * b, where a is integer and b is a rational number +#[inline] +pub(crate) fn ceil_mul(a: usize, b: (usize, usize)) -> usize { + (a * b.0 + b.1 - 1) / b.1 +} + +/// Return ceil(x / y). +pub(crate) fn ceil_div(x: usize, y: usize) -> usize { + // XXX. warning: this expression can overflow. + (x + y - 1) / y +} + +#[derive(Debug)] +pub struct Matrix { + pub(crate) n: usize, + pub(crate) m: usize, + entries: Vec>, +} + +impl Matrix { + /// Returns a Matrix of dimensions n x m given a list of n * m field elements. + /// The list should be ordered row-first, i.e. [a11, ..., a1m, a21, ..., a2m, ...]. + /// + /// # Panics + /// Panics if the dimensions do not match the length of the list + pub(crate) fn new_from_flat(n: usize, m: usize, entry_list: &[F]) -> Self { + assert_eq!( + entry_list.len(), + n * m, + "Invalid matrix construction: dimensions are {} x {} but entry vector has {} entries", + n, + m, + entry_list.len() + ); + + // TODO more efficient to run linearly? + let entries: Vec> = (0..n) + .map(|row| (0..m).map(|col| entry_list[m * row + col]).collect()) + .collect(); + + Self { n, m, entries } + } + + /// Returns a Matrix given a list of its rows, each in turn represented as a list of field elements. + /// + /// # Panics + /// Panics if the sub-lists do not all have the same length. + pub(crate) fn new_from_rows(row_list: Vec>) -> Self { + let m = row_list[0].len(); + + for row in row_list.iter().skip(1) { + assert_eq!( + row.len(), + m, + "Invalid matrix construction: not all rows have the same length" + ); + } + + Self { + n: row_list.len(), + m, + entries: row_list, + } + } + + /// Returns the entry in position (i, j). **Indexing starts at 0 in both coordinates**, + /// i.e. the first element is in position (0, 0) and the last one in (n - 1, j - 1), + /// where n and m are the number of rows and columns, respectively. + /// + /// Index bound checks are waived for efficiency and behaviour under invalid indexing is undefined + #[cfg(test)] + pub(crate) fn entry(&self, i: usize, j: usize) -> F { + self.entries[i][j] + } + + /// Returns self as a list of rows + pub(crate) fn rows(&self) -> Vec> { + self.entries.clone() + } + + /// Returns self as a list of columns + pub(crate) fn cols(&self) -> Vec> { + (0..self.m) + .map(|col| (0..self.n).map(|row| self.entries[row][col]).collect()) + .collect() + } + + /// Returns the product v * self, where v is interpreted as a row vector. In other words, + /// it returns a linear combination of the rows of self with coefficients given by v. + /// + /// Panics if the length of v is different from the number of rows of self. + pub(crate) fn row_mul(&self, v: &[F]) -> Vec { + assert_eq!( + v.len(), + self.n, + "Invalid row multiplication: vector has {} elements whereas each matrix column has {}", + v.len(), + self.n + ); + + (0..self.m) + .map(|col| { + inner_product( + v, + &(0..self.n) + .map(|row| self.entries[row][col]) + .collect::>(), + ) + }) + .collect() + } +} + +#[inline] +pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { + ark_std::cfg_iter!(v1) + .zip(v2) + .map(|(li, ri)| *li * ri) + .sum() +} + +/// The following struct is taken from jellyfish repository. 
+
+/// The following struct is taken from the jellyfish repository. Once they
+/// change their dependency on `crypto-primitive`, we will use their crate
+/// instead of a copy-paste. We needed the newer `crypto-primitive` for
+/// serializing.
+#[derive(Clone)]
+pub(crate) struct IOPTranscript<F: PrimeField> {
+    transcript: Transcript,
+    is_empty: bool,
+    #[doc(hidden)]
+    phantom: PhantomData<F>,
+}
+
+// TODO: merge this with jf_plonk::transcript
+impl<F: PrimeField> IOPTranscript<F> {
+    /// Create a new IOP transcript.
+    pub(crate) fn new(label: &'static [u8]) -> Self {
+        Self {
+            transcript: Transcript::new(label),
+            is_empty: true,
+            phantom: PhantomData,
+        }
+    }
+
+    /// Append the message to the transcript.
+    pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> {
+        self.transcript.append_message(label, msg);
+        self.is_empty = false;
+        Ok(())
+    }
+
+    /// Append a serializable element to the transcript.
+    pub(crate) fn append_serializable_element<S: CanonicalSerialize>(
+        &mut self,
+        label: &'static [u8],
+        group_elem: &S,
+    ) -> Result<(), Error> {
+        self.append_message(
+            label,
+            &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?,
+        )
+    }
+
+    /// Generate a field-element challenge from the current transcript
+    /// and append it to the transcript.
+    ///
+    /// The output field element is statistically uniform as long
+    /// as the field has a size less than 2^384.
+    pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result<F, Error> {
+        // we need to reject when the transcript is empty
+        if self.is_empty {
+            return Err(Error::TranscriptError);
+        }
+
+        let mut buf = [0u8; 64];
+        self.transcript.challenge_bytes(label, &mut buf);
+        let challenge = F::from_le_bytes_mod_order(&buf);
+        self.append_serializable_element(label, &challenge)?;
+        Ok(challenge)
+    }
+
+    /// Generate a byte challenge from the current transcript
+    /// and append it to the transcript.
+    ///
+    /// Without exposing the internal field `transcript`,
+    /// this is a wrapper around getting bytes as opposed to field elements.
+    pub(crate) fn get_and_append_byte_challenge(
+        &mut self,
+        label: &'static [u8],
+        dest: &mut [u8],
+    ) -> Result<(), Error> {
+        // we need to reject when the transcript is empty
+        if self.is_empty {
+            return Err(Error::TranscriptError);
+        }
+
+        self.transcript.challenge_bytes(label, dest);
+        self.append_message(label, dest)?;
+        Ok(())
+    }
+}
+
+#[inline]
+#[cfg(test)]
+pub(crate) fn to_field<F: Field>(v: Vec<u64>) -> Vec<F> {
+    v.iter().map(|x| F::from(*x)).collect::<Vec<F>>()
+}
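A minimal usage sketch of the transcript API above (editorial, not part of the patch; the labels and the `commitment` value are placeholders):

    let mut transcript = IOPTranscript::<Fr>::new(b"example protocol");
    transcript.append_serializable_element(b"commitment", &commitment)?;
    // derive a field challenge bound to everything appended so far
    let r: Fr = transcript.get_and_append_challenge(b"r")?;
    // or squeeze raw bytes, e.g. to sample column indices
    let mut bytes = [0u8; 4];
    transcript.get_and_append_byte_challenge(b"i", &mut bytes)?;

Note that squeezing before anything has been appended returns `Error::TranscriptError`, by the `is_empty` check above.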
+// TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112.
+#[cfg(test)]
+use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
+
+#[cfg(test)]
+pub(crate) fn test_sponge<F: PrimeField>() -> PoseidonSponge<F> {
+    use ark_crypto_primitives::sponge::{poseidon::PoseidonConfig, CryptographicSponge};
+    use ark_std::test_rng;
+
+    let full_rounds = 8;
+    let partial_rounds = 31;
+    let alpha = 17;
+
+    let mds = vec![
+        vec![F::one(), F::zero(), F::one()],
+        vec![F::one(), F::one(), F::zero()],
+        vec![F::zero(), F::one(), F::one()],
+    ];
+
+    let mut v = Vec::new();
+    let mut ark_rng = test_rng();
+
+    for _ in 0..(full_rounds + partial_rounds) {
+        let mut res = Vec::new();
+
+        for _ in 0..3 {
+            res.push(F::rand(&mut ark_rng));
+        }
+        v.push(res);
+    }
+    let config = PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, v, 2, 1);
+    PoseidonSponge::new(&config)
+}
+
+#[cfg(test)]
+pub(crate) mod tests {
+
+    use super::*;
+
+    use ark_bls12_377::Fr;
+
+    #[test]
+    fn test_matrix_constructor_flat() {
+        let entries: Vec<Fr> = to_field(vec![10, 100, 4, 67, 44, 50]);
+        let mat = Matrix::new_from_flat(2, 3, &entries);
+        assert_eq!(mat.entry(1, 2), Fr::from(50));
+    }
+
+    #[test]
+    fn test_matrix_constructor_flat_square() {
+        let entries: Vec<Fr> = to_field(vec![10, 100, 4, 67]);
+        let mat = Matrix::new_from_flat(2, 2, &entries);
+        assert_eq!(mat.entry(1, 1), Fr::from(67));
+    }
+
+    #[test]
+    #[should_panic(expected = "dimensions are 2 x 3 but entry vector has 5 entries")]
+    fn test_matrix_constructor_flat_panic() {
+        let entries: Vec<Fr> = to_field(vec![10, 100, 4, 67, 44]);
+        Matrix::new_from_flat(2, 3, &entries);
+    }
+
+    #[test]
+    fn test_matrix_constructor_rows() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![10, 100, 4]),
+            to_field(vec![23, 1, 0]),
+            to_field(vec![55, 58, 9]),
+        ];
+        let mat = Matrix::new_from_rows(rows);
+        assert_eq!(mat.entry(2, 0), Fr::from(55));
+    }
+
+    #[test]
+    #[should_panic(expected = "not all rows have the same length")]
+    fn test_matrix_constructor_rows_panic() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![10, 100, 4]),
+            to_field(vec![23, 1, 0]),
+            to_field(vec![55, 58]),
+        ];
+        Matrix::new_from_rows(rows);
+    }
+
+    #[test]
+    fn test_cols() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![4, 76]),
+            to_field(vec![14, 92]),
+            to_field(vec![17, 89]),
+        ];
+
+        let mat = Matrix::new_from_rows(rows);
+
+        assert_eq!(mat.cols()[1], to_field(vec![76, 92, 89]));
+    }
+
+    #[test]
+    fn test_row_mul() {
+        let rows: Vec<Vec<Fr>> = vec![
+            to_field(vec![10, 100, 4]),
+            to_field(vec![23, 1, 0]),
+            to_field(vec![55, 58, 9]),
+        ];
+
+        let mat = Matrix::new_from_rows(rows);
+        let v: Vec<Fr> = to_field(vec![12, 41, 55]);
+        // By giving the result over the integers and then converting to Fr,
+        // we ensure the test will still pass even if Fr changes.
+        assert_eq!(mat.row_mul(&v), to_field::<Fr>(vec![4088, 4431, 543]));
+    }
+}

From e00d5b0f0cf89584d40323ebc6b35aba6c1fdd7b Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 09:45:07 +0200
Subject: [PATCH 04/75] adapt the scheme to
 https://github.com/arkworks-rs/algebra/issues/691

---
 src/multilinear_pc/mod.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs
index bd5d3e53..eff86ab9 100644
--- a/src/multilinear_pc/mod.rs
+++ b/src/multilinear_pc/mod.rs
@@ -265,7 +265,9 @@ mod tests {
     use crate::multilinear_pc::MultilinearPC;
     use ark_bls12_381::Bls12_381;
     use ark_ec::pairing::Pairing;
-    use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension};
+    use ark_poly::{
+        DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension,
+    };
     use ark_std::rand::RngCore;
     use ark_std::test_rng;
     use ark_std::vec::Vec;
@@ -284,7 +286,7 @@ mod tests {
 
         let com = MultilinearPC::commit(&ck, poly);
         let proof = MultilinearPC::open(&ck, poly, &point);
-        let value = poly.evaluate(&point).unwrap();
+        let value = poly.evaluate(&point);
         let result = MultilinearPC::check(&vk, &com, &point, value, &proof);
         assert!(result);
     }
@@ -332,7 +334,7 @@ mod tests {
 
         let com = MultilinearPC::commit(&ck, &poly);
         let proof = MultilinearPC::open(&ck, &poly, &point);
-        let value = poly.evaluate(&point).unwrap();
+        let value = poly.evaluate(&point);
         let result = MultilinearPC::check(&vk, &com, &point, value + &(1u16.into()), &proof);
         assert!(!result);
     }

From 9d5119b8abb441ee784ef4b4af91723097effe44 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 09:53:54 +0200
Subject: [PATCH 05/75] move tests shared across univariate and ML ligero to
 utils

---
 src/linear_codes/multilinear_ligero/tests.rs | 18 ------------------
 src/linear_codes/univariate_ligero/tests.rs  | 18 ------------------
 src/linear_codes/utils.rs                    | 19 +++++++++++++++++++
 3 files changed, 19 insertions(+), 36 deletions(-)

diff --git a/src/linear_codes/multilinear_ligero/tests.rs b/src/linear_codes/multilinear_ligero/tests.rs
index e8017d7e..9df67eee 100644
--- a/src/linear_codes/multilinear_ligero/tests.rs
+++ b/src/linear_codes/multilinear_ligero/tests.rs
@@ -8,7 +8,6 @@ mod tests {
         linear_codes::{utils::*, LigeroPCParams, MultilinearLigero, PolynomialCommitment},
         LabeledPolynomial,
     };
-    use ark_bls12_377::Fq;
     use ark_bls12_377::Fr;
     use ark_bls12_381::Fr as Fr381;
     use ark_crypto_primitives::{
@@ -146,23 +145,6 @@ mod tests {
         .unwrap());
     }
 
-    #[test]
-    fn test_calculate_t_with_good_parameters() {
-        assert!(calculate_t::<Fr>(128, (3, 4), 2_usize.pow(32)).unwrap() < 200);
-        assert!(calculate_t::<Fr>(256, (3, 4), 2_usize.pow(32)).unwrap() < 400);
-    }
-
-    #[test]
-    fn test_calculate_t_with_bad_parameters() {
-        calculate_t::<Fq>(
-            (Fq::MODULUS_BIT_SIZE - 60) as usize,
-            (3, 4),
-            2_usize.pow(60),
-        )
-        .unwrap_err();
-        calculate_t::<Fr>(400, (3, 4), 2_usize.pow(32)).unwrap_err();
-    }
-
     fn rand_point<F: PrimeField>(num_vars: Option<usize>, rng: &mut ChaCha20Rng) -> Vec<F> {
         match num_vars {
             Some(n) => (0..n).map(|_| F::rand(rng)).collect(),
diff --git a/src/linear_codes/univariate_ligero/tests.rs b/src/linear_codes/univariate_ligero/tests.rs
index b0fb67c8..6cee8f5a 100644
--- a/src/linear_codes/univariate_ligero/tests.rs
+++ b/src/linear_codes/univariate_ligero/tests.rs
@@ -9,7 +9,6 @@ mod tests {
         linear_codes::{utils::*, LigeroPCParams, PolynomialCommitment, UnivariateLigero},
         LabeledPolynomial,
     };
-    use ark_bls12_377::Fq;
     use ark_bls12_377::Fr;
     use ark_bls12_381::Fr as Fr381;
     use ark_crypto_primitives::{
@@ -140,23 +139,6 @@ mod tests {
         .unwrap());
     }
 
-    #[test]
-    fn test_calculate_t_with_good_parameters() {
-        assert!(calculate_t::<Fr>(128, (3, 4), 2_usize.pow(32)).unwrap() < 200);
-        assert!(calculate_t::<Fr>(256, (3, 4), 2_usize.pow(32)).unwrap() < 400);
-    }
-
-    #[test]
-    fn test_calculate_t_with_bad_parameters() {
-        calculate_t::<Fq>(
-            (Fq::MODULUS_BIT_SIZE - 60) as usize,
-            (3, 4),
-            2_usize.pow(60),
-        )
-        .unwrap_err();
-        calculate_t::<Fr>(400, (3, 4), 2_usize.pow(32)).unwrap_err();
-    }
-
     fn rand_point<F: PrimeField>(_: Option<usize>, rng: &mut ChaCha20Rng) -> F {
         F::rand(rng)
     }
diff --git a/src/linear_codes/utils.rs b/src/linear_codes/utils.rs
index 99c7a068..472dfc94 100644
--- a/src/linear_codes/utils.rs
+++ b/src/linear_codes/utils.rs
@@ -195,6 +195,8 @@ pub(crate) fn tensor_vec<F: PrimeField>(values: &[F]) -> Vec<F> {
 pub(crate) mod tests {
 
     use
 super::*;
+
+    use ark_bls12_377::Fq;
     use ark_bls12_377::Fr;
     use ark_poly::{
         domain::general::GeneralEvaluationDomain, univariate::DensePolynomial, DenseUVPolynomial,
@@ -243,4 +245,21 @@
         assert_eq!(get_num_bytes(1 << 32), 5);
         assert_eq!(get_num_bytes(1 << 32 + 1), 5);
     }
+
+    #[test]
+    fn test_calculate_t_with_good_parameters() {
+        assert!(calculate_t::<Fr>(128, (3, 4), 2_usize.pow(32)).unwrap() < 200);
+        assert!(calculate_t::<Fr>(256, (3, 4), 2_usize.pow(32)).unwrap() < 400);
+    }
+
+    #[test]
+    fn test_calculate_t_with_bad_parameters() {
+        calculate_t::<Fq>(
+            (Fq::MODULUS_BIT_SIZE - 60) as usize,
+            (3, 4),
+            2_usize.pow(60),
+        )
+        .unwrap_err();
+        calculate_t::<Fr>(400, (3, 4), 2_usize.pow(32)).unwrap_err();
+    }
 }

From aba5dd23e4e350274c6e3ee24d6c101016ad9a00 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 09:45:07 +0200
Subject: [PATCH 06/75] adapt the scheme to
 https://github.com/arkworks-rs/algebra/issues/691

---
 src/multilinear_pc/mod.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs
index bd5d3e53..eff86ab9 100644
--- a/src/multilinear_pc/mod.rs
+++ b/src/multilinear_pc/mod.rs
@@ -265,7 +265,9 @@ mod tests {
     use crate::multilinear_pc::MultilinearPC;
     use ark_bls12_381::Bls12_381;
     use ark_ec::pairing::Pairing;
-    use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension};
+    use ark_poly::{
+        DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension,
+    };
     use ark_std::rand::RngCore;
     use ark_std::test_rng;
     use ark_std::vec::Vec;
@@ -284,7 +286,7 @@ mod tests {
 
         let com = MultilinearPC::commit(&ck, poly);
         let proof = MultilinearPC::open(&ck, poly, &point);
-        let value = poly.evaluate(&point).unwrap();
+        let value = poly.evaluate(&point);
         let result = MultilinearPC::check(&vk, &com, &point, value, &proof);
         assert!(result);
     }
@@ -332,7 +334,7 @@ mod tests {
 
         let com = MultilinearPC::commit(&ck, &poly);
         let proof = MultilinearPC::open(&ck, &poly, &point);
-        let value = poly.evaluate(&point).unwrap();
+        let value = poly.evaluate(&point);
         let result = MultilinearPC::check(&vk, &com, &point, value + &(1u16.into()), &proof);
         assert!(!result);
     }

From b1f65af27f693a5f67c09fa70722365fcebab1ac Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 09:53:54 +0200
Subject: [PATCH 07/75] move tests shared across schemes to utils

---
 .../multilinear_brakedown/tests.rs | 18 ------------------
 src/linear_codes/utils.rs          | 19 +++++++++++++++++++
 2 files changed, 19 insertions(+), 18 deletions(-)

diff --git a/src/linear_codes/multilinear_brakedown/tests.rs b/src/linear_codes/multilinear_brakedown/tests.rs
index 6e94ad2d..1a820650 100644
--- a/src/linear_codes/multilinear_brakedown/tests.rs
+++ b/src/linear_codes/multilinear_brakedown/tests.rs
@@ -8,7 +8,6 @@ mod tests {
         linear_codes::{utils::*, BrakedownPCParams, MultilinearBrakedown, PolynomialCommitment},
         LabeledPolynomial,
     };
-    use ark_bls12_377::Fq;
     use ark_bls12_377::Fr;
     use ark_bls12_381::Fr as Fr381;
     use ark_crypto_primitives::{
@@ -146,23 +145,6 @@ mod tests {
         .unwrap());
     }
 
-    #[test]
-    fn test_calculate_t_with_good_parameters() {
-        assert!(calculate_t::<Fr>(128, (3, 4), 2_usize.pow(32)).unwrap() < 200);
-        assert!(calculate_t::<Fr>(256, (3, 4), 2_usize.pow(32)).unwrap() < 400);
-    }
-
-    #[test]
-    fn test_calculate_t_with_bad_parameters() {
-        calculate_t::<Fq>(
-            (Fq::MODULUS_BIT_SIZE - 60) as usize,
-            (3, 4),
-            2_usize.pow(60),
-        )
-        .unwrap_err();
-        calculate_t::<Fr>(400, (3, 4), 2_usize.pow(32)).unwrap_err();
-    }
-
     fn rand_point<F: PrimeField>(num_vars: Option<usize>, rng: &mut ChaCha20Rng) -> Vec<F> {
         match num_vars {
             Some(n) => (0..n).map(|_| F::rand(rng)).collect(),
diff --git a/src/linear_codes/utils.rs b/src/linear_codes/utils.rs
index e53df9a9..2d26feda 100644
--- a/src/linear_codes/utils.rs
+++ b/src/linear_codes/utils.rs
@@ -264,6 +264,8 @@ pub(crate) mod tests {
     use crate::utils::to_field;
 
     use super::*;
+
+    use ark_bls12_377::Fq;
     use ark_bls12_377::Fr;
 
     #[test]
@@ -305,4 +307,21 @@ pub(crate) mod tests {
         assert_eq!(get_num_bytes(1 << 32), 5);
         assert_eq!(get_num_bytes(1 << 32 + 1), 5);
     }
+
+    #[test]
+    fn test_calculate_t_with_good_parameters() {
+        assert!(calculate_t::<Fr>(128, (3, 4), 2_usize.pow(32)).unwrap() < 200);
+        assert!(calculate_t::<Fr>(256, (3, 4), 2_usize.pow(32)).unwrap() < 400);
+    }
+
+    #[test]
+    fn test_calculate_t_with_bad_parameters() {
+        calculate_t::<Fq>(
+            (Fq::MODULUS_BIT_SIZE - 60) as usize,
+            (3, 4),
+            2_usize.pow(60),
+        )
+        .unwrap_err();
+        calculate_t::<Fr>(400, (3, 4), 2_usize.pow(32)).unwrap_err();
+    }
 }

From 86a322fcf3ea73b0bb973dd30c37b288b4b8e831 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 10:00:53 +0200
Subject: [PATCH 08/75] remove unused no-std import

---
 src/utils.rs | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/utils.rs b/src/utils.rs
index 6a819bf0..5606c6b0 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,8 +1,5 @@
 use core::marker::PhantomData;
 
-#[cfg(not(feature = "std"))]
-use num_traits::Float;
-
 #[cfg(feature = "parallel")]
 use rayon::{
     iter::{IntoParallelRefIterator, ParallelIterator},

From 69896d44e3c18583de33b5f250d6195f447c96b3 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 09:45:07 +0200
Subject: [PATCH 09/75] adapt the scheme to
 https://github.com/arkworks-rs/algebra/issues/691

---
 Cargo.toml                |  2 ++
 src/multilinear_pc/mod.rs |  8 +++++---
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 8c30f9ba..86f0f5e3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -61,6 +61,8 @@ num-traits = { version = "0.2", default-features = false, features = ["libm"] }
 ark-ff = { git = "https://github.com/arkworks-rs/algebra/" }
 ark-ec = { git = "https://github.com/arkworks-rs/algebra/" }
 ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" }
+ark-poly = { git = "https://github.com/arkworks-rs/algebra/" }
+
 ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" }
 ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" }
 
diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs
index bd5d3e53..eff86ab9 100644
--- a/src/multilinear_pc/mod.rs
+++ b/src/multilinear_pc/mod.rs
@@ -265,7 +265,9 @@ mod tests {
     use crate::multilinear_pc::MultilinearPC;
     use ark_bls12_381::Bls12_381;
     use ark_ec::pairing::Pairing;
-    use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension};
+    use ark_poly::{
+        DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension,
+    };
     use ark_std::rand::RngCore;
     use ark_std::test_rng;
     use ark_std::vec::Vec;
@@ -284,7 +286,7 @@ mod tests {
 
         let com = MultilinearPC::commit(&ck, poly);
         let proof = MultilinearPC::open(&ck, poly, &point);
-        let value = poly.evaluate(&point).unwrap();
+        let value = poly.evaluate(&point);
         let result = MultilinearPC::check(&vk, &com, &point, value, &proof);
         assert!(result);
     }
@@ -332,7 +334,7 @@ mod tests {
 
         let com = MultilinearPC::commit(&ck, &poly);
         let proof = MultilinearPC::open(&ck, &poly, &point);
-        let value = poly.evaluate(&point).unwrap();
+        let value = poly.evaluate(&point);
         let result = MultilinearPC::check(&vk, &com, &point, value + &(1u16.into()), &proof);
         assert!(!result);
     }

From ecf73f4b1cca7e89c2fa0a75b963b3869a9c8509 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 26 Oct 2023 10:35:10 +0200
Subject: [PATCH 10/75] remove unused code in hyrax

---
 src/utils.rs | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/src/utils.rs b/src/utils.rs
index 437f993e..7c4a0575 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,8 +1,5 @@
 use core::marker::PhantomData;
 
-#[cfg(not(feature = "std"))]
-use num_traits::Float;
-
 #[cfg(feature = "parallel")]
 use rayon::{
     iter::{IntoParallelRefIterator, ParallelIterator},
@@ -64,16 +61,6 @@ impl<F: Field> Matrix<F> {
         }
     }
 
-    /// Returns the entry in position (i, j). **Indexing starts at 0 in both coordinates**,
-    /// i.e. the first element is in position (0, 0) and the last one in (n - 1, m - 1),
-    /// where n and m are the number of rows and columns, respectively.
-    ///
-    /// Index bound checks are waived for efficiency and behaviour under invalid indexing is undefined
-    #[cfg(test)]
-    pub(crate) fn entry(&self, i: usize, j: usize) -> F {
-        self.entries[i][j]
-    }
-
     /// Returns the product v * self, where v is interpreted as a row vector. In other words,
     /// it returns a linear combination of the rows of self with coefficients given by v.
     ///
     /// Panics if the length of v is different from the number of rows of self.
@@ -181,12 +168,6 @@ impl<F: PrimeField> IOPTranscript<F> {
     }
 }
 
-#[inline]
-#[cfg(test)]
-pub(crate) fn to_field<F: Field>(v: Vec<u64>) -> Vec<F> {
-    v.iter().map(|x| F::from(*x)).collect::<Vec<F>>()
-}
-
 // TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112.
 #[cfg(test)]
 use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;

From 234e049d3a1d15ef12c3262571c192a0585e9b64 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Fri, 20 Oct 2023 14:35:33 +0200
Subject: [PATCH 11/75] Improve the choice of dimensions for polynomial matrix

---
 src/linear_codes/ligero.rs | 30 +++++++++++++++++++-----------
 1 file changed, 19 insertions(+), 11 deletions(-)

diff --git a/src/linear_codes/ligero.rs b/src/linear_codes/ligero.rs
index 5102c146..fbd9f876 100644
--- a/src/linear_codes/ligero.rs
+++ b/src/linear_codes/ligero.rs
@@ -1,13 +1,13 @@
 use super::LigeroPCParams;
 use super::LinCodeParametersInfo;
+use crate::linear_codes::utils::calculate_t;
 use crate::utils::ceil_div;
 use crate::{PCCommitterKey, PCUniversalParams, PCVerifierKey};
 
 use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme};
 use ark_crypto_primitives::merkle_tree::{Config, LeafParam, TwoToOneParam};
 use ark_ff::PrimeField;
-use ark_poly::EvaluationDomain;
-use ark_poly::GeneralEvaluationDomain;
+use ark_std::log2;
 use ark_std::marker::PhantomData;
 #[cfg(not(feature = "std"))]
 use num_traits::Float;
@@ -112,21 +112,29 @@ where
         self.sec_param
     }
 
-    /// Compute a suitable (for instance, FFT-friendly over F) matrix with at least n entries.
+    /// Compute a suitable (for instance, FFT-friendly over F) matrix with at least poly_len entries.
     /// The return pair (n, m) corresponds to the dimensions n x m.
-    fn compute_dimensions(&self, n: usize) -> (usize, usize) {
+    fn compute_dimensions(&self, poly_len: usize) -> (usize, usize) {
         assert_eq!(
-            (n as f64) as usize,
-            n,
+            (poly_len as f64) as usize,
+            poly_len,
             "n cannot be converted to f64: aborting"
         );
 
-        let aux = (n as f64).sqrt().ceil() as usize;
-        let n_cols = GeneralEvaluationDomain::<F>::new(aux)
-            .expect("Field F does not admit FFT with m elements")
-            .size();
+        // let aux = (poly_len as f64).sqrt().ceil() as usize;
+        // let n_cols = GeneralEvaluationDomain::<F>::new(aux)
+        //     .expect("Field F does not admit FFT with m elements")
+        //     .size();
+        // TODO this check is actually insufficient, pass rho_inv and
+        // check the codeword length (or just disregard check)
 
-        (ceil_div(n, n_cols), n_cols)
+        // TODO changed
+        let t = calculate_t::<F>(self.sec_param(), self.distance(), poly_len).unwrap();
+        let n = 1 << log2((ceil_div(2 * poly_len, t) as f64).sqrt().ceil() as usize);
+        let m = ceil_div(poly_len, n);
+
+        // (ceil_div(poly_len, n_cols), n_cols)
+        (n, m)
     }
 
     fn leaf_hash_params(&self) -> &<<C as Config>::LeafHash as CRHScheme>::Parameters {

From dd281f180ef6d1d41ee130baca3a78184eac701c Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Thu, 26 Oct 2023 10:44:54 +0200
Subject: [PATCH 12/75] Update comments

---
 src/linear_codes/ligero.rs | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)

diff --git a/src/linear_codes/ligero.rs b/src/linear_codes/ligero.rs
index fbd9f876..f60125b4 100644
--- a/src/linear_codes/ligero.rs
+++ b/src/linear_codes/ligero.rs
@@ -114,26 +114,16 @@ where
     /// Compute a suitable (for instance, FFT-friendly over F) matrix with at least poly_len entries.
     /// The return pair (n, m) corresponds to the dimensions n x m.
+    /// FIXME: Maybe, there should be some checks for making sure the extended row can have an FFT.
     fn compute_dimensions(&self, poly_len: usize) -> (usize, usize) {
         assert_eq!(
             (poly_len as f64) as usize,
             poly_len,
             "n cannot be converted to f64: aborting"
         );
-
-        // let aux = (poly_len as f64).sqrt().ceil() as usize;
-        // let n_cols = GeneralEvaluationDomain::<F>::new(aux)
-        //     .expect("Field F does not admit FFT with m elements")
-        //     .size();
-        // TODO this check is actually insufficient, pass rho_inv and
-        // check the codeword length (or just disregard check)
-
-        // TODO changed
         let t = calculate_t::<F>(self.sec_param(), self.distance(), poly_len).unwrap();
         let n = 1 << log2((ceil_div(2 * poly_len, t) as f64).sqrt().ceil() as usize);
         let m = ceil_div(poly_len, n);
-
-        // (ceil_div(poly_len, n_cols), n_cols)
         (n, m)
     }
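A quick sanity check on the numbers this picks (editorial, with assumed parameters): for a polynomial with poly_len = 2^20 coefficients over a ~255-bit field, sec_param = 128 and distance (3, 4), calculate_t yields t ≈ 191, so ceil_div(2 * 2^20, t) ≈ 10980, whose square root rounds up to 105. Since ark_std::log2 returns the ceiling of the base-2 logarithm, n is rounded to the next power of two, n = 128, and m = ceil_div(2^20, 128) = 8192.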
From 14f80fdc225cfea515e6d2401d2aa225f8ea5aa9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Thu, 26 Oct 2023 10:55:49 +0200
Subject: [PATCH 13/75] parallelised row encoding and col-to-leaf hashing;
 significant performance gains

---
 src/linear_codes/mod.rs | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs
index a6b40fbc..9ff9236a 100644
--- a/src/linear_codes/mod.rs
+++ b/src/linear_codes/mod.rs
@@ -14,6 +14,7 @@ use ark_std::marker::PhantomData;
 use ark_std::rand::RngCore;
 use ark_std::string::ToString;
 use ark_std::vec::Vec;
+use rayon::iter::{IntoParallelRefIterator, ParallelIterator, IntoParallelIterator};
 
 mod utils;
 
@@ -118,8 +119,9 @@ where
         let mat = Matrix::new_from_flat(n_rows, n_cols, &coeffs);
 
         // 2. Apply encoding row-wise
+        let rows = mat.rows();
         let ext_mat =
-            Matrix::new_from_rows(mat.rows().iter().map(|r| Self::encode(r, param)).collect());
+            Matrix::new_from_rows(cfg_iter!(rows).map(|r| Self::encode(r, param)).collect());
 
         (mat, ext_mat)
     }
@@ -155,7 +157,7 @@ where
     C: Config + 'static,
     Vec<F>: Borrow<<H as CRHScheme>::Input>,
     H::Output: Into<C::Leaf>,
-    C::Leaf: Sized + Clone + Default,
+    C::Leaf: Sized + Clone + Default + Send,
     H: CRHScheme,
 {
     type UniversalParams = L::LinCodePCParams;
@@ -550,15 +552,13 @@ where
     H: CRHScheme,
     Vec<F>: Borrow<<H as CRHScheme>::Input>,
     H::Output: Into<C::Leaf>,
-    C::Leaf: Default + Clone,
+    C::Leaf: Default + Clone + Send,
 {
-    let mut col_hashes: Vec<C::Leaf> = Vec::new();
     let ext_mat_cols = ext_mat.cols();
 
-    for col in ext_mat_cols.into_iter() {
-        let col_digest = hash_column::<F, C, H>(col, col_hash_params)?;
-        col_hashes.push(col_digest);
-    }
+    let mut col_hashes: Vec<C::Leaf> = cfg_into_iter!(ext_mat_cols)
+        .map(|col| hash_column::<F, C, H>(col, &col_hash_params).unwrap())
+        .collect();
 
     // pad the column hashes with zeroes
     let next_pow_of_two = col_hashes.len().next_power_of_two();

From f0dd3361ba077a231170019043a72b21c9de515b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Thu, 26 Oct 2023 11:15:00 +0200
Subject: [PATCH 14/75] parallelised row encoding and col-to-leaf hashing;
 significant performance gains

---
 src/linear_codes/mod.rs | 22 +++++++++-------------
 1 file changed, 9 insertions(+), 13 deletions(-)

diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs
index fa160620..32ea2f31 100644
--- a/src/linear_codes/mod.rs
+++ b/src/linear_codes/mod.rs
@@ -14,6 +14,7 @@ use ark_std::marker::PhantomData;
 use ark_std::rand::RngCore;
 use ark_std::string::ToString;
 use ark_std::vec::Vec;
+use rayon::iter::{IntoParallelRefIterator, ParallelIterator, IntoParallelIterator};
 
 mod utils;
 
@@ -116,12 +117,9 @@ where
         let mat = Matrix::new_from_flat(n_rows, n_cols, &coeffs);
 
         // 2. Apply encoding row-wise
-        let ext_mat = Matrix::new_from_rows(
-            mat.rows()
-                .iter()
-                .map(|r| Self::encode(r, param).unwrap()) // Since we just computed the dimension, the error does not happen
-                .collect(),
-        );
+        let rows = mat.rows();
+        let ext_mat =
+            Matrix::new_from_rows(cfg_iter!(rows).map(|r| Self::encode(r, param).unwrap()).collect());
 
         (mat, ext_mat)
     }
@@ -157,7 +155,7 @@ where
     C: Config + 'static,
     Vec<F>: Borrow<<H as CRHScheme>::Input>,
     H::Output: Into<C::Leaf>,
-    C::Leaf: Sized + Clone + Default,
+    C::Leaf: Sized + Clone + Default + Send,
     H: CRHScheme,
 {
     type UniversalParams = L::LinCodePCParams;
@@ -552,15 +550,13 @@ where
     H: CRHScheme,
     Vec<F>: Borrow<<H as CRHScheme>::Input>,
     H::Output: Into<C::Leaf>,
-    C::Leaf: Default + Clone,
+    C::Leaf: Default + Clone + Send,
 {
-    let mut col_hashes: Vec<C::Leaf> = Vec::new();
     let ext_mat_cols = ext_mat.cols();
 
-    for col in ext_mat_cols.into_iter() {
-        let col_digest = hash_column::<F, C, H>(col, col_hash_params)?;
-        col_hashes.push(col_digest);
-    }
+    let mut col_hashes: Vec<C::Leaf> = cfg_into_iter!(ext_mat_cols)
+        .map(|col| hash_column::<F, C, H>(col, &col_hash_params).unwrap())
+        .collect();
 
     // pad the column hashes with zeroes
     let next_pow_of_two = col_hashes.len().next_power_of_two();

From f424c4830ff252b3b9968c309c382c86486dbd35 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Thu, 26 Oct 2023 11:26:24 +0200
Subject: [PATCH 15/75] expanded on Future Optimisations section

---
 src/hyrax/mod.rs | 36 ++++++++++++++++++++++++++----------
 1 file changed, 26 insertions(+), 10 deletions(-)

diff --git a/src/hyrax/mod.rs b/src/hyrax/mod.rs
index 37462274..96c2316b 100644
--- a/src/hyrax/mod.rs
+++ b/src/hyrax/mod.rs
@@ -51,20 +51,36 @@ pub const PROTOCOL_NAME: &'static [u8] = b"Hyrax protocol";
 ///
 /// ### Future optimisations
 ///
-/// - Due to the homomorphic nature of Pedersen commitments, it is likely some
-///   of the following methods can be designed more efficiently than their
-///   default implementations: batch_open, batch_check, open_combinations,
-///   check_combinations. This is not discussed in the reference article, but
-///   the IPA and KZG modules might be a good starting point.
-/// - On a related note to the previous point, there might be a more efficient
-///   way to open several polynomials at a single point than the currently
-///   implemented method, where only the computation of the vectors L and R is
+/// - Deal with the modification described above: either modify the PCS trait
+///   to encompass hiding PCSs (in terms of the actual evaluation, not only
+///   the polynomial), or turn this scheme into a non-hiding one by removing
+///   unnecessary work (which would probably involve non-trivial theoretical
+///   work).
+/// - Add parallelisation. There is at least one natural place where
+///   parallelisation could bring performance gains: in essence, the prover
+///   commits to the polynomial by expressing it as an evaluation matrix and
+///   Pedersen-multi-committing to each row. Each of these commitments can be
+///   computed independently from the rest, and therefore, in parallel. It is
+///   still to be seen how much of an improvement this would entail, since each
+///   Pedersen multi-commitment boils down to a multi-exponentiation and this
+///   operation is itself parallelised.
+/// - Due to the homomorphic nature of Pedersen commitments, it is likely
+///   some of the following methods can be designed more efficiently than their
+///   default implementations: `batch_open`, `batch_check`,
+///   `open_combinations`, `check_combinations`. This is not discussed in the
+///   reference article, but the IPA and KZG modules might be a good starting
+///   point.
+/// - On a related note to the previous point, there might be a more
+///   efficient way to open several polynomials at a single point (this is the
+///   functionality of the `open` method) than the currently implemented
+///   technique, where only the computation of the vectors `L` and `R` is
 ///   shared across polynomials.
-/// - The cited article proposes an optimisation in the section `Reducing the
-///   cost of proof-of-dot-prod`. It allows for non-square matrices (and hence
+/// - The cited article proposes an optimisation in the section _Reducing the
+///   cost of proof-of-dot-prod_. It allows for non-square matrices (and hence
 ///   removes the requirement for the number of variables to be even) and
 ///   introduces a tradeoff between proof size and verifier time. It is
 ///   probably worth pursuing.
+
 pub struct HyraxPC<
     // The elliptic curve used for Pedersen commitments (only EC groups are
     // supported as of now).
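As a rough illustration of the homomorphic batching idea mentioned in the optimisations above (an editorial sketch under assumed names, not part of the patch): two Hyrax commitments, given as vectors of Pedersen row commitments to polynomials p_a and p_b, can be folded under a verifier challenge c, so that a single opening of p_a + c * p_b can stand in for two separate openings.

    use ark_ec::{AffineRepr, CurveGroup};

    // Fold row commitments: com(p_a)_i + c * com(p_b)_i = com(p_a + c * p_b)_i,
    // by the additive homomorphism of Pedersen commitments (the hiding
    // randomness folds the same way on the prover side).
    fn fold_row_commitments<G: AffineRepr>(
        rows_a: &[G],
        rows_b: &[G],
        c: G::ScalarField,
    ) -> Vec<G> {
        rows_a
            .iter()
            .zip(rows_b)
            .map(|(a, b)| (a.into_group() + b.into_group() * c).into_affine())
            .collect()
    }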
From 6138fe24cb1595dfd71d901594fe1ca13665831c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Thu, 26 Oct 2023 12:13:12 +0200
Subject: [PATCH 16/75] fixed GH action failures: formatted and added feature
 flag

---
 src/linear_codes/mod.rs | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs
index 32ea2f31..5516f1a8 100644
--- a/src/linear_codes/mod.rs
+++ b/src/linear_codes/mod.rs
@@ -14,7 +14,9 @@ use ark_std::marker::PhantomData;
 use ark_std::rand::RngCore;
 use ark_std::string::ToString;
 use ark_std::vec::Vec;
-use rayon::iter::{IntoParallelRefIterator, ParallelIterator, IntoParallelIterator};
+
+#[cfg(feature = "parallel")]
+use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};
 
 mod utils;
 
@@ -118,8 +120,11 @@ where
         // 2. Apply encoding row-wise
         let rows = mat.rows();
-        let ext_mat =
-            Matrix::new_from_rows(cfg_iter!(rows).map(|r| Self::encode(r, param).unwrap()).collect());
+        let ext_mat = Matrix::new_from_rows(
+            cfg_iter!(rows)
+                .map(|r| Self::encode(r, param).unwrap())
+                .collect(),
+        );
 
         (mat, ext_mat)
     }

From 0628824cdadc62ad14c79d71f4e514ce11143e04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Thu, 26 Oct 2023 12:14:15 +0200
Subject: [PATCH 17/75] fixed GH action failures: formatted and added feature
 flag

---
 src/linear_codes/mod.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs
index 9ff9236a..47b65092 100644
--- a/src/linear_codes/mod.rs
+++ b/src/linear_codes/mod.rs
@@ -14,6 +14,7 @@ use ark_std::marker::PhantomData;
 use ark_std::rand::RngCore;
 use ark_std::string::ToString;
 use ark_std::vec::Vec;
-use rayon::iter::{IntoParallelRefIterator, ParallelIterator, IntoParallelIterator};
+
+#[cfg(feature = "parallel")]
+use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};
 
 mod utils;

From 0d8dc4514550067db61f7a3b408b2c4ef10e3ffc Mon Sep 17 00:00:00 2001
From: mmagician
Date: Sat, 28 Oct 2023 18:10:26 +0200
Subject: [PATCH 18/75] remove Prepared data types from `PolynomialCommitment`
 trait

---
 src/linear_codes/data_structures.rs | 19 +------------------
 src/linear_codes/mod.rs             |  4 ----
 2 files changed, 1 insertion(+), 22 deletions(-)

diff --git a/src/linear_codes/data_structures.rs b/src/linear_codes/data_structures.rs
index 3da4a47b..8a6f91dd 100644
--- a/src/linear_codes/data_structures.rs
+++ b/src/linear_codes/data_structures.rs
@@ -1,6 +1,4 @@
-use crate::{
-    PCCommitment, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness, PCVerifierKey,
-};
+use crate::{PCCommitment, PCRandomness};
 use ark_crypto_primitives::{
     crh::CRHScheme,
     merkle_tree::{Config, LeafParam, Path, TwoToOneParam},
@@ -33,6 +31,7 @@ pub struct LigeroPCParams<F: PrimeField, C: Config, H: CRHScheme> {
     pub(crate) col_hash_params: H::Parameters,
 }
 
-pub(crate) type LinCodePCPreparedVerifierKey = ();
-
-impl<Unprepared: PCVerifierKey> PCPreparedVerifierKey<Unprepared> for LinCodePCPreparedVerifierKey {
-    fn prepare(_vk: &Unprepared) -> Self {}
-}
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
 pub(crate) struct Metadata {
@@ -66,16 +59,6 @@ impl<C: Config> PCCommitment for LinCodePCCommitment<C> {
     }
 }
 
-pub(crate) type LinCodePCPreparedCommitment<C> = LinCodePCCommitment<C>;
-
-impl<Unprepared: PCCommitment, C: Config> PCPreparedCommitment<Unprepared>
-    for LinCodePCPreparedCommitment<C>
-{
-    fn prepare(_cm: &Unprepared) -> Self {
-        LinCodePCPreparedCommitment::default()
-    }
-}
-
 pub(crate) type LinCodePCRandomness = ();
 
 impl PCRandomness for LinCodePCRandomness {
diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs
index 47b65092..0ce9510c 100644
--- a/src/linear_codes/mod.rs
+++ b/src/linear_codes/mod.rs
@@ -168,12 +168,8 @@
 
     type VerifierKey = L::LinCodePCParams;
 
-    type PreparedVerifierKey = LinCodePCPreparedVerifierKey;
-
     type Commitment = LinCodePCCommitment<C>;
 
-    type PreparedCommitment = LinCodePCPreparedCommitment<C>;
-
     type Randomness = LinCodePCRandomness;
 
     type Proof = LPCPArray<F, C>;

From b0d825b7825cfc4294757665ee945f37f4bdf657 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Sat, 28 Oct 2023 18:10:26 +0200
Subject: [PATCH 19/75] remove Prepared data types from `PolynomialCommitment`
 trait

---
 src/linear_codes/data_structures.rs | 19 +------------------
 src/linear_codes/mod.rs             |  4 ----
 2 files changed, 1 insertion(+), 22 deletions(-)

diff --git a/src/linear_codes/data_structures.rs b/src/linear_codes/data_structures.rs
index c176be71..ecebcf8f 100644
--- a/src/linear_codes/data_structures.rs
+++ b/src/linear_codes/data_structures.rs
@@ -1,7 +1,5 @@
 use super::utils::SprsMat;
-use crate::{
-    PCCommitment, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness, PCVerifierKey,
-};
+use crate::{PCCommitment, PCRandomness};
 use ark_crypto_primitives::{
     crh::CRHScheme,
     merkle_tree::{Config, LeafParam, Path, TwoToOneParam},
@@ -62,11 +60,6 @@ pub struct BrakedownPCParams<F: PrimeField, C: Config, H: CRHScheme> {
     pub(crate) col_hash_params: H::Parameters,
 }
 
-pub(crate) type LinCodePCPreparedVerifierKey = ();
-
-impl<Unprepared: PCVerifierKey> PCPreparedVerifierKey<Unprepared> for LinCodePCPreparedVerifierKey {
-    fn prepare(_vk: &Unprepared) -> Self {}
-}
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
 pub(crate) struct Metadata {
@@ -95,16 +88,6 @@ impl<C: Config> PCCommitment for LinCodePCCommitment<C> {
     }
 }
 
-pub(crate) type LinCodePCPreparedCommitment<C> = LinCodePCCommitment<C>;
-
-impl<Unprepared: PCCommitment, C: Config> PCPreparedCommitment<Unprepared>
-    for LinCodePCPreparedCommitment<C>
-{
-    fn prepare(_cm: &Unprepared) -> Self {
-        LinCodePCPreparedCommitment::default()
-    }
-}
-
 pub(crate) type LinCodePCRandomness = ();
 
 impl PCRandomness for LinCodePCRandomness {
diff --git a/src/linear_codes/mod.rs b/src/linear_codes/mod.rs
index 5516f1a8..e6628dd8 100644
--- a/src/linear_codes/mod.rs
+++ b/src/linear_codes/mod.rs
@@ -169,12 +169,8 @@
 
     type VerifierKey = L::LinCodePCParams;
 
-    type PreparedVerifierKey = LinCodePCPreparedVerifierKey;
-
     type Commitment = LinCodePCCommitment<C>;
 
-    type PreparedCommitment = LinCodePCPreparedCommitment<C>;
-
     type Randomness = LinCodePCRandomness;
 
     type Proof = LPCPArray<F, C>;

From 06c59e98f324c43b1081423bde606355cdc4789a Mon Sep 17 00:00:00 2001
From: mmagician
Date: Sat, 28 Oct 2023 18:14:38 +0200
Subject: [PATCH 20/75] Remove Prepared data types from `PolynomialCommitment`
 trait impl

---
 src/hyrax/data_structures.rs | 25 +------------------------
 src/hyrax/mod.rs             |  2 --
 2 files changed, 1 insertion(+), 26 deletions(-)

diff --git a/src/hyrax/data_structures.rs b/src/hyrax/data_structures.rs
index c1f91b42..57a068d7 100644
--- a/src/hyrax/data_structures.rs
+++ b/src/hyrax/data_structures.rs
@@ -3,10 +3,7 @@ use ark_ff::PrimeField;
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::{rand::RngCore, vec::Vec};
 
-use crate::{
-    PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness,
-    PCUniversalParams, PCVerifierKey,
-};
+use crate::{PCCommitment, PCCommitterKey, PCRandomness, PCUniversalParams, PCVerifierKey};
 
 /// `UniversalParams` amounts to a Pedersen commitment key of sufficient length
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
@@ -53,16 +50,6 @@ impl<G: AffineRepr> PCVerifierKey for HyraxVerifierKey<G> {
     }
 }
 
-/// Nothing to do to prepare this prover-verifier key.
-pub type HyraxPreparedVerifierKey<G> = HyraxVerifierKey<G>;
-
-impl<G: AffineRepr> PCPreparedVerifierKey<HyraxVerifierKey<G>> for HyraxPreparedVerifierKey<G> {
-    /// Simply clone the prover-verifier key
-    fn prepare(vk: &HyraxVerifierKey<G>) -> Self {
-        vk.clone()
-    }
-}
-
 /// Hyrax commitment to a polynomial consisting of one multi-commit per row of
 /// the coefficient matrix
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
@@ -88,16 +75,6 @@ impl<G: AffineRepr> PCCommitment for HyraxCommitment<G> {
     }
 }
 
-/// No preparation is needed for Hyrax commitments
-pub type HyraxPreparedCommitment<G> = HyraxCommitment<G>;
-
-impl<G: AffineRepr> PCPreparedCommitment<HyraxCommitment<G>> for HyraxPreparedCommitment<G> {
-    /// Simply clone the prover-verifier key
-    fn prepare(vk: &HyraxCommitment<G>) -> Self {
-        vk.clone()
-    }
-}
-
 pub(crate) type HyraxRandomness<F> = Vec<F>;
 
 /// A vector of scalars, each of which multiplies the distinguished group
diff --git a/src/hyrax/mod.rs b/src/hyrax/mod.rs
index 96c2316b..887fa255 100644
--- a/src/hyrax/mod.rs
+++ b/src/hyrax/mod.rs
@@ -144,9 +144,7 @@ impl<G: AffineRepr, P: MultilinearExtension<G::ScalarField>>
     type UniversalParams = HyraxUniversalParams<G>;
     type CommitterKey = HyraxCommitterKey<G>;
     type VerifierKey = HyraxVerifierKey<G>;
-    type PreparedVerifierKey = HyraxPreparedVerifierKey<G>;
     type Commitment = HyraxCommitment<G>;
-    type PreparedCommitment = HyraxPreparedCommitment<G>;
     type Randomness = HyraxRandomness<G::ScalarField>;
     type Proof = Vec<HyraxProof<G>>;
     type BatchProof = Vec<Self::Proof>;

From 12afb3307f5772832501a42f7b29823d875b79c5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Mon, 30 Oct 2023 16:00:01 +0100
Subject: [PATCH 21/75] added necessary dependencies overwritten by previous
 merge commit

---
 poly-commit/Cargo.toml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml
index 19098ce0..30456feb 100644
--- a/poly-commit/Cargo.toml
+++ b/poly-commit/Cargo.toml
@@ -15,6 +15,8 @@ ark-ec = { version = "^0.4.0", default-features = false }
 ark-poly = {version = "^0.4.0", default-features = false }
 ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge", "merkle_tree"] }
 ark-std = { version = "^0.4.0", default-features = false }
+blake2 = { version = "0.10", default-features = false }
+merlin = { version = "3.0.0", default-features = false }
 
 ark-relations = { version = "^0.4.0", default-features = false, optional = true }
 ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true }
@@ -38,7 +40,6 @@
 ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false }
 ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] }
 ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] }
-blake2 = { version = "0.10", default-features = false }
 rand_chacha = { version = "0.3.0", default-features = false }
 ark-pcs-bench-templates = { path = "../bench-templates" }

From 5e955bc130edefcd211ab6cf39eaccd94c887f98 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?=
Date: Mon, 30 Oct 2023 16:37:54 +0100
Subject: [PATCH 22/75] fixed hashbrown version

---
 poly-commit/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml
index b5cf2d9c..ae7b8fa9 100644
--- a/poly-commit/Cargo.toml
+++ b/poly-commit/Cargo.toml
@@ -18,7 +18,7 @@ ark-std = { version = "^0.4.0", default-features = false }
 ark-relations = { version = "^0.4.0", default-features = false, optional = true }
 ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true }
 
-hashbrown = { 
version = "0.13", default-features = false, optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } From 3b0c371d6c15208656ee4a45efcb8b2b592bc2a9 Mon Sep 17 00:00:00 2001 From: mmagician Date: Mon, 30 Oct 2023 18:30:20 +0100 Subject: [PATCH 23/75] Add back the cfg dependency for no-std build --- poly-commit/Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index ae7b8fa9..9f2fbbeb 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -43,6 +43,9 @@ blake2 = { version = "0.10", default-features = false } rand_chacha = { version = "0.3.0", default-features = false } ark-pcs-bench-templates = { path = "../bench-templates" } +[target.'cfg(target_arch = "aarch64")'.dependencies] +num-traits = { version = "0.2", default-features = false, features = ["libm"] } + [features] default = [ "std", "parallel" ] std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] From 6967c28c40464efa52afb93a2362a373fbdba520 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 30 Oct 2023 16:37:54 +0100 Subject: [PATCH 24/75] fixed hashbrown version --- poly-commit/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 30456feb..47cdd342 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -20,7 +20,7 @@ merlin = { version = "3.0.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.13", default-features = false, optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } From dd82dbc09574ed197c730a81de933464d9a0119c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 30 Oct 2023 20:42:09 +0100 Subject: [PATCH 25/75] pulled --- poly-commit/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 47cdd342..ba32e10f 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -25,6 +25,7 @@ hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } [[bench]] name = "pcs" From a029081272b8b67d52713010fc9f3748a4fdd85f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 30 Oct 2023 20:42:38 +0100 Subject: [PATCH 26/75] created separate benchmark files --- poly-commit/Cargo.toml | 12 +++++++++ poly-commit/benches/hyrax_times.rs | 26 ++++++++++++++++++++ poly-commit/benches/{pcs.rs => ipa_times.rs} | 0 3 files changed, 38 insertions(+) create mode 100644 poly-commit/benches/hyrax_times.rs rename poly-commit/benches/{pcs.rs => ipa_times.rs} (100%) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index ba32e10f..5668d8b2 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -27,6 +27,16 @@ derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } hashbrown = { version = 
"0.14", default-features = false, optional = true } +[[bench]] +name = "ipa_times" +path = "benches/ipa_times.rs" +harness = false + +[[bench]] +name = "hyrax_times" +path = "benches/hyrax_times.rs" +harness = false + [[bench]] name = "pcs" path = "benches/pcs.rs" @@ -41,6 +51,8 @@ harness = false ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } +ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } + rand_chacha = { version = "0.3.0", default-features = false } ark-pcs-bench-templates = { path = "../bench-templates" } diff --git a/poly-commit/benches/hyrax_times.rs b/poly-commit/benches/hyrax_times.rs new file mode 100644 index 00000000..ca4925c8 --- /dev/null +++ b/poly-commit/benches/hyrax_times.rs @@ -0,0 +1,26 @@ +use ark_ec::AffineRepr; +use ark_pcs_bench_templates::*; +use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; + +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +use ark_bn254::{Fr, G1Affine}; +use ark_ff::PrimeField; +use ark_poly::univariate::DensePolynomial as DenseUnivariatePoly; +use ark_poly_commit::ipa_pc::InnerProductArgPC; + +use rand_chacha::ChaCha20Rng; + +type UniPoly = DenseUnivariatePoly; +type Sponge = PoseidonSponge<::ScalarField>; + +// Hyrax PCS over BN254 +type Hyrax254 = HyraxPC>; + +fn rand_poly_hyrax(num_vars: usize, rng: &mut ChaCha20Rng) -> DenseMultilinearExtension { + DenseMultilinearExtension::rand(num_vars, rng) +} + +const MIN_NUM_VARS: usize = 10; +const MAX_NUM_VARS: usize = 20; + +bench!(Hyrax254, rand_poly_hyrax); diff --git a/poly-commit/benches/pcs.rs b/poly-commit/benches/ipa_times.rs similarity index 100% rename from poly-commit/benches/pcs.rs rename to poly-commit/benches/ipa_times.rs From a7f465a09b073581f18a05f0e1d5e8bac98aa9f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 30 Oct 2023 20:44:02 +0100 Subject: [PATCH 27/75] fixed duplicate dependency to match other branches --- poly-commit/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 5668d8b2..088f56e8 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -20,7 +20,6 @@ merlin = { version = "3.0.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } From c010663c748260cba1075524cde84ae14f50f25d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 30 Oct 2023 21:24:09 +0100 Subject: [PATCH 28/75] patched bn254 dep --- Cargo.toml | 1 + poly-commit/benches/hyrax_times.rs | 5 +---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f76ff587..617fd8a1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,3 +38,4 @@ ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } diff --git a/poly-commit/benches/hyrax_times.rs b/poly-commit/benches/hyrax_times.rs index 
ca4925c8..932d3a1a 100644 --- a/poly-commit/benches/hyrax_times.rs +++ b/poly-commit/benches/hyrax_times.rs @@ -1,17 +1,14 @@ -use ark_ec::AffineRepr; use ark_pcs_bench_templates::*; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; -use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_bn254::{Fr, G1Affine}; use ark_ff::PrimeField; use ark_poly::univariate::DensePolynomial as DenseUnivariatePoly; -use ark_poly_commit::ipa_pc::InnerProductArgPC; +use ark_poly_commit::hyrax::HyraxPC; use rand_chacha::ChaCha20Rng; type UniPoly = DenseUnivariatePoly<Fr>; -type Sponge = PoseidonSponge<<G1Affine as AffineRepr>::ScalarField>; // Hyrax PCS over BN254 type Hyrax254 = HyraxPC<G1Affine, DenseMultilinearExtension<Fr>>; From d415053587a8bfe6475023f34a2666ac9d2f4479 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Tue, 31 Oct 2023 08:41:23 +0100 Subject: [PATCH 29/75] restructured benchmark macros to accept ML schemes; benches working --- bench-templates/src/lib.rs | 24 ++++++++++++++---------- poly-commit/Cargo.toml | 5 ----- poly-commit/benches/hyrax_times.rs | 14 +++++++++----- poly-commit/benches/ipa_times.rs | 6 +++++- 4 files changed, 28 insertions(+), 21 deletions(-) diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 1594ee7c..31f78931 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -30,8 +30,10 @@ pub fn bench_pcs_method< &PCS::VerifierKey, usize, fn(usize, &mut ChaCha20Rng) -> P, + fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) { let mut group = c.benchmark_group(msg); let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -44,7 +46,7 @@ pub fn bench_pcs_method< BenchmarkId::from_parameter(num_vars), &num_vars, |b, num_vars| { - b.iter(|| method(&ck, &vk, *num_vars, rand_poly)); + b.iter(|| method(&ck, &vk, *num_vars, rand_poly, rand_point)); }, ); } @@ -62,6 +64,7 @@ pub fn commit< _vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + _rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -102,12 +105,12 @@ pub fn open<F, P, PCS>( _vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration where F: PrimeField, P: Polynomial<F>, PCS: PolynomialCommitment<F, P, PoseidonSponge<F>>, - P::Point: UniformRand, { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -115,7 +118,7 @@ where LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - let point = P::Point::rand(rng); + let point = rand_point(num_vars, rng); let start = Instant::now(); let _ = PCS::open( @@ -173,12 +176,12 @@ pub fn verify<F, P, PCS>( vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration where F: PrimeField, P: Polynomial<F>, PCS: PolynomialCommitment<F, P, PoseidonSponge<F>>, - P::Point: UniformRand, { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -186,7 +189,7 @@ where LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - let point = P::Point::rand(rng); + let point = rand_point(num_vars, rng); let claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( &ck, @@ -243,7 +246,7 @@ fn 
test_sponge<F: PrimeField>() -> PoseidonSponge<F> { #[macro_export] macro_rules! bench_method { - ($c:expr, $method:ident, $scheme_type:ty, $rand_poly:ident) => { + ($c:expr, $method:ident, $scheme_type:ty, $rand_poly:ident, $rand_point:ident) => { let scheme_type_str = stringify!($scheme_type); let bench_name = format!("{} {}", stringify!($method), scheme_type_str); bench_pcs_method::<_, _, $scheme_type>( @@ -252,6 +255,7 @@ macro_rules! bench_method { &bench_name, $method::<_, _, $scheme_type>, $rand_poly::<_>, + $rand_point::<_>, ); }; } @@ -259,12 +263,12 @@ macro_rules! bench { ( - $scheme_type:ty, $rand_poly:ident + $scheme_type:ty, $rand_poly:ident, $rand_point:ident ) => { fn bench_pcs(c: &mut Criterion) { - bench_method!(c, commit, $scheme_type, $rand_poly); - bench_method!(c, open, $scheme_type, $rand_poly); - bench_method!(c, verify, $scheme_type, $rand_poly); + bench_method!(c, commit, $scheme_type, $rand_poly, $rand_point); + bench_method!(c, open, $scheme_type, $rand_poly, $rand_point); + bench_method!(c, verify, $scheme_type, $rand_poly, $rand_point); } criterion_group!(benches, bench_pcs); diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 088f56e8..1c2a6347 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -36,11 +36,6 @@ name = "hyrax_times" path = "benches/hyrax_times.rs" harness = false -[[bench]] -name = "pcs" -path = "benches/pcs.rs" -harness = false - [[bench]] name = "size" path = "benches/size.rs" diff --git a/poly-commit/benches/hyrax_times.rs b/poly-commit/benches/hyrax_times.rs index 932d3a1a..60c5a057 100644 --- a/poly-commit/benches/hyrax_times.rs +++ b/poly-commit/benches/hyrax_times.rs @@ -3,21 +3,25 @@ use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; use ark_bn254::{Fr, G1Affine}; use ark_ff::PrimeField; -use ark_poly::univariate::DensePolynomial as DenseUnivariatePoly; use ark_poly_commit::hyrax::HyraxPC; use rand_chacha::ChaCha20Rng; -type UniPoly = DenseUnivariatePoly<Fr>; - // Hyrax PCS over BN254 type Hyrax254 = HyraxPC<G1Affine, DenseMultilinearExtension<Fr>>; -fn rand_poly_hyrax(num_vars: usize, rng: &mut ChaCha20Rng) -> DenseMultilinearExtension<Fr> { +fn rand_poly_hyrax<F: PrimeField>( + num_vars: usize, + rng: &mut ChaCha20Rng, +) -> DenseMultilinearExtension<F> { DenseMultilinearExtension::rand(num_vars, rng) } +fn rand_point_hyrax<F: PrimeField>(num_vars: usize, rng: &mut ChaCha20Rng) -> Vec<F> { + (0..num_vars).map(|_| F::rand(rng)).collect() } + const MIN_NUM_VARS: usize = 10; const MAX_NUM_VARS: usize = 20; -bench!(Hyrax254, rand_poly_hyrax); +bench!(Hyrax254, rand_poly_hyrax, rand_point_hyrax); diff --git a/poly-commit/benches/ipa_times.rs b/poly-commit/benches/ipa_times.rs index 77ab04f7..27b4d3ba 100644 --- a/poly-commit/benches/ipa_times.rs +++ b/poly-commit/benches/ipa_times.rs @@ -22,7 +22,11 @@ fn rand_poly_ipa_pc<F: PrimeField>(degree: usize, rng: &mut ChaCha20Rng) -> Dens DenseUnivariatePoly::rand(degree, rng) } +fn rand_point_ipa_pc<F: PrimeField>(_: usize, rng: &mut ChaCha20Rng) -> F { + F::rand(rng) +} + const MIN_NUM_VARS: usize = 10; const MAX_NUM_VARS: usize = 20; -bench!(IPA_JubJub, rand_poly_ipa_pc); +bench!(IPA_JubJub, rand_poly_ipa_pc, rand_point_ipa_pc);
poly-commit/src/linear_codes/mod.rs | 2 - .../linear_codes/multilinear_ligero/tests.rs | 4 +- .../linear_codes/univariate_ligero/tests.rs | 4 +- poly-commit/src/linear_codes/utils.rs | 64 -------------- 8 files changed, 93 insertions(+), 87 deletions(-) rename poly-commit/benches/{pcs.rs => ipa_times.rs} (85%) diff --git a/Cargo.toml b/Cargo.toml index 0b0fd81d..617fd8a1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,11 +30,12 @@ debug = true [patch.crates-io] ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } -ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } +ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 1594ee7c..38a1a7a2 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -1,17 +1,17 @@ -use ark_crypto_primitives::sponge::{ +use ark_crypto_primitives::{sponge::{ poseidon::{PoseidonConfig, PoseidonSponge}, CryptographicSponge, -}; +}, crh::{sha256::digest::Digest, CRHScheme}}; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_serialize::{CanonicalSerialize, Compress}; use ark_std::{test_rng, UniformRand}; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; +use rand_chacha::{rand_core::{SeedableRng, RngCore}, ChaCha20Rng}; use core::time::Duration; -use std::time::Instant; +use std::{time::Instant, marker::PhantomData, borrow::Borrow}; -use ark_poly_commit::{challenge::ChallengeGenerator, LabeledPolynomial, PolynomialCommitment}; +use ark_poly_commit::{challenge::ChallengeGenerator, LabeledPolynomial, PolynomialCommitment, to_bytes}; pub use criterion::*; pub use paste::paste; @@ -30,8 +30,10 @@ pub fn bench_pcs_method< &PCS::VerifierKey, usize, fn(usize, &mut ChaCha20Rng) -> P, + fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) { let mut group = c.benchmark_group(msg); let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -44,7 +46,7 @@ pub fn bench_pcs_method< BenchmarkId::from_parameter(num_vars), &num_vars, |b, num_vars| { - b.iter(|| method(&ck, &vk, *num_vars, rand_poly)); + b.iter(|| method(&ck, &vk, *num_vars, rand_poly, rand_point)); }, ); } @@ -62,6 +64,7 @@ pub fn commit< _vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + _rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -102,12 +105,12 @@ pub fn open( _vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration where F: PrimeField, P: Polynomial, PCS: PolynomialCommitment>, - P::Point: UniformRand, { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -115,7 +118,7 @@ where LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - let point = P::Point::rand(rng); + 
let point = rand_point(num_vars, rng); let start = Instant::now(); let _ = PCS::open( @@ -173,12 +176,12 @@ pub fn verify<F, P, PCS>( vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration where F: PrimeField, P: Polynomial<F>, PCS: PolynomialCommitment<F, P, PoseidonSponge<F>>, - P::Point: UniformRand, { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -186,7 +189,7 @@ where LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - let point = P::Point::rand(rng); + let point = rand_point(num_vars, rng); let claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( &ck, @@ -243,7 +246,7 @@ fn test_sponge<F: PrimeField>() -> PoseidonSponge<F> { #[macro_export] macro_rules! bench_method { - ($c:expr, $method:ident, $scheme_type:ty, $rand_poly:ident) => { + ($c:expr, $method:ident, $scheme_type:ty, $rand_poly:ident, $rand_point:ident) => { let scheme_type_str = stringify!($scheme_type); let bench_name = format!("{} {}", stringify!($method), scheme_type_str); bench_pcs_method::<_, _, $scheme_type>( @@ -252,6 +255,7 @@ macro_rules! bench_method { &bench_name, $method::<_, _, $scheme_type>, $rand_poly::<_>, + $rand_point::<_>, ); }; } @@ -259,12 +263,12 @@ macro_rules! bench { ( - $scheme_type:ty, $rand_poly:ident + $scheme_type:ty, $rand_poly:ident, $rand_point:ident ) => { fn bench_pcs(c: &mut Criterion) { - bench_method!(c, commit, $scheme_type, $rand_poly); - bench_method!(c, open, $scheme_type, $rand_poly); - bench_method!(c, verify, $scheme_type, $rand_poly); + bench_method!(c, commit, $scheme_type, $rand_poly, $rand_point); + bench_method!(c, open, $scheme_type, $rand_poly, $rand_point); + bench_method!(c, verify, $scheme_type, $rand_poly, $rand_point); } criterion_group!(benches, bench_pcs); @@ -276,3 +280,57 @@ macro_rules! bench { } }; } + +/**** Auxiliary methods for linear-code-based PCSs ****/ + +/// Needed for benches and tests. +pub struct LeafIdentityHasher; + +impl CRHScheme for LeafIdentityHasher { + type Input = Vec<u8>; + type Output = Vec<u8>; + type Parameters = (); + + fn setup<R: RngCore>(_: &mut R) -> Result<Self::Parameters, ark_crypto_primitives::Error> { + Ok(()) + } + + fn evaluate<T: Borrow<Self::Input>>( + _: &Self::Parameters, + input: T, + ) -> Result<Self::Output, ark_crypto_primitives::Error> { + Ok(input.borrow().to_vec().into()) + } +} + +/// Needed for benches and tests. 
+pub struct FieldToBytesColHasher<F, D> +where + F: PrimeField + CanonicalSerialize, + D: Digest, +{ + _phantom: PhantomData<(F, D)>, +} + +impl<F, D> CRHScheme for FieldToBytesColHasher<F, D> +where + F: PrimeField + CanonicalSerialize, + D: Digest, +{ + type Input = Vec<F>; + type Output = Vec<u8>; + type Parameters = (); + + fn setup<R: RngCore>(_rng: &mut R) -> Result<Self::Parameters, ark_crypto_primitives::Error> { + Ok(()) + } + + fn evaluate<T: Borrow<Self::Input>>( + _parameters: &Self::Parameters, + input: T, + ) -> Result<Self::Output, ark_crypto_primitives::Error> { + let mut dig = D::new(); + dig.update(to_bytes!(input.borrow()).unwrap()); + Ok(dig.finalize().to_vec()) + } +} diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 9f2fbbeb..cdcf87e8 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -26,8 +26,13 @@ rayon = { version = "1", optional = true } merlin = { version = "3.0.0", default-features = false } [[bench]] -name = "pcs" -path = "benches/pcs.rs" +name = "ipa_times" +path = "benches/ipa_times.rs" harness = false + +[[bench]] +name = "ligero_ml_times" +path = "benches/ligero_ml_times.rs" +harness = false [[bench]] diff --git a/poly-commit/benches/pcs.rs b/poly-commit/benches/ipa_times.rs similarity index 85% rename from poly-commit/benches/pcs.rs rename to poly-commit/benches/ipa_times.rs index 77ab04f7..27b4d3ba 100644 --- a/poly-commit/benches/pcs.rs +++ b/poly-commit/benches/ipa_times.rs @@ -22,7 +22,11 @@ fn rand_poly_ipa_pc<F: PrimeField>(degree: usize, rng: &mut ChaCha20Rng) -> Dens DenseUnivariatePoly::rand(degree, rng) } +fn rand_point_ipa_pc<F: PrimeField>(_: usize, rng: &mut ChaCha20Rng) -> F { + F::rand(rng) +} + const MIN_NUM_VARS: usize = 10; const MAX_NUM_VARS: usize = 20; -bench!(IPA_JubJub, rand_poly_ipa_pc); +bench!(IPA_JubJub, rand_poly_ipa_pc, rand_point_ipa_pc); diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 0ce9510c..8e21c83a 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -31,8 +31,6 @@ mod ligero; use data_structures::*; pub use data_structures::{LigeroPCParams, LinCodePCProof}; -#[cfg(any(feature = "benches", test))] -pub use utils::{FieldToBytesColHasher, LeafIdentityHasher}; use utils::{calculate_t, get_indices_from_transcript, hash_column}; diff --git a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs index 9df67eee..4e5c99c1 100644 --- a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs +++ b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs @@ -5,7 +5,7 @@ mod tests { use crate::utils::test_sponge; use crate::{ challenge::ChallengeGenerator, - linear_codes::{utils::*, LigeroPCParams, MultilinearLigero, PolynomialCommitment}, + linear_codes::{LigeroPCParams, MultilinearLigero, PolynomialCommitment}, LabeledPolynomial, }; use ark_bls12_377::Fr; @@ -21,6 +21,8 @@ mod tests { use blake2::Blake2s256; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + use ark_pcs_bench_templates::{LeafIdentityHasher, FieldToBytesColHasher}; + type LeafH = LeafIdentityHasher; type CompressH = Sha256; type ColHasher = FieldToBytesColHasher<Fr, Blake2s256>; diff --git a/poly-commit/src/linear_codes/univariate_ligero/tests.rs b/poly-commit/src/linear_codes/univariate_ligero/tests.rs index 6cee8f5a..45f441d8 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/tests.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/tests.rs @@ -6,7 +6,7 @@ mod tests { use crate::utils::test_sponge; use crate::{ challenge::ChallengeGenerator, - linear_codes::{utils::*, LigeroPCParams, PolynomialCommitment, UnivariateLigero}, + linear_codes::{LigeroPCParams, 
PolynomialCommitment, UnivariateLigero}, LabeledPolynomial, }; use ark_bls12_377::Fr; @@ -22,6 +22,8 @@ mod tests { use blake2::Blake2s256; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + use ark_pcs_bench_templates::{LeafIdentityHasher, FieldToBytesColHasher}; + type LeafH = LeafIdentityHasher; type CompressH = Sha256; type ColHasher = FieldToBytesColHasher; diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 472dfc94..9e43221f 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -13,14 +13,6 @@ use ark_std::vec::Vec; #[cfg(not(feature = "std"))] use num_traits::Float; -#[cfg(any(feature = "benches", test))] -use { - crate::to_bytes, - ark_serialize::CanonicalSerialize, - ark_std::{marker::PhantomData, rand::RngCore}, - digest::Digest, -}; - /// Apply reed-solomon encoding to msg. /// Assumes msg.len() is equal to the order of some FFT domain in F. /// Returns a vector of length equal to the smallest FFT domain of size at least msg.len() * RHO_INV. @@ -115,62 +107,6 @@ pub(crate) fn calculate_t( Ok(if t < codeword_len { t } else { codeword_len }) } -/// Only needed for benches and tests. -#[cfg(any(feature = "benches", test))] -pub struct LeafIdentityHasher; - -#[cfg(any(feature = "benches", test))] -impl CRHScheme for LeafIdentityHasher { - type Input = Vec; - type Output = Vec; - type Parameters = (); - - fn setup(_: &mut R) -> Result { - Ok(()) - } - - fn evaluate>( - _: &Self::Parameters, - input: T, - ) -> Result { - Ok(input.borrow().to_vec().into()) - } -} - -/// Only needed for benches and tests. -#[cfg(any(feature = "benches", test))] -pub struct FieldToBytesColHasher -where - F: PrimeField + CanonicalSerialize, - D: Digest, -{ - _phantom: PhantomData<(F, D)>, -} - -#[cfg(any(feature = "benches", test))] -impl CRHScheme for FieldToBytesColHasher -where - F: PrimeField + CanonicalSerialize, - D: Digest, -{ - type Input = Vec; - type Output = Vec; - type Parameters = (); - - fn setup(_rng: &mut R) -> Result { - Ok(()) - } - - fn evaluate>( - _parameters: &Self::Parameters, - input: T, - ) -> Result { - let mut dig = D::new(); - dig.update(to_bytes!(input.borrow()).unwrap()); - Ok(dig.finalize().to_vec()) - } -} - pub(crate) fn tensor_vec(values: &[F]) -> Vec { let one = F::one(); let anti_values: Vec = values.iter().map(|v| one - *v).collect(); From 80410d0587cf4181e1c473dcb0527c2721548dd4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Tue, 31 Oct 2023 11:42:17 +0100 Subject: [PATCH 31/75] completed ligero benchmarks --- bench-templates/src/lib.rs | 22 +++++++++++++------ poly-commit/Cargo.toml | 1 + .../linear_codes/multilinear_ligero/tests.rs | 2 +- .../linear_codes/univariate_ligero/tests.rs | 2 +- 4 files changed, 18 insertions(+), 9 deletions(-) diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 38a1a7a2..92dbd07e 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -1,17 +1,25 @@ -use ark_crypto_primitives::{sponge::{ - poseidon::{PoseidonConfig, PoseidonSponge}, - CryptographicSponge, -}, crh::{sha256::digest::Digest, CRHScheme}}; +use ark_crypto_primitives::{ + crh::{sha256::digest::Digest, CRHScheme}, + sponge::{ + poseidon::{PoseidonConfig, PoseidonSponge}, + CryptographicSponge, + }, +}; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_serialize::{CanonicalSerialize, Compress}; use ark_std::{test_rng, UniformRand}; -use rand_chacha::{rand_core::{SeedableRng, RngCore}, ChaCha20Rng}; +use 
rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaCha20Rng, +}; use core::time::Duration; -use std::{time::Instant, marker::PhantomData, borrow::Borrow}; +use std::{borrow::Borrow, marker::PhantomData, time::Instant}; -use ark_poly_commit::{challenge::ChallengeGenerator, LabeledPolynomial, PolynomialCommitment, to_bytes}; +use ark_poly_commit::{ + challenge::ChallengeGenerator, to_bytes, LabeledPolynomial, PolynomialCommitment, +}; pub use criterion::*; pub use paste::paste; diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index cdcf87e8..59740e07 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -44,6 +44,7 @@ harness = false ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } +ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } blake2 = { version = "0.10", default-features = false } rand_chacha = { version = "0.3.0", default-features = false } ark-pcs-bench-templates = { path = "../bench-templates" } diff --git a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs index 4e5c99c1..2f91c402 100644 --- a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs +++ b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs @@ -21,7 +21,7 @@ mod tests { use blake2::Blake2s256; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; - use ark_pcs_bench_templates::{LeafIdentityHasher, FieldToBytesColHasher}; + use ark_pcs_bench_templates::{FieldToBytesColHasher, LeafIdentityHasher}; type LeafH = LeafIdentityHasher; type CompressH = Sha256; diff --git a/poly-commit/src/linear_codes/univariate_ligero/tests.rs b/poly-commit/src/linear_codes/univariate_ligero/tests.rs index 45f441d8..da0ec6fb 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/tests.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/tests.rs @@ -22,7 +22,7 @@ mod tests { use blake2::Blake2s256; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; - use ark_pcs_bench_templates::{LeafIdentityHasher, FieldToBytesColHasher}; + use ark_pcs_bench_templates::{FieldToBytesColHasher, LeafIdentityHasher}; type LeafH = LeafIdentityHasher; type CompressH = Sha256; From 25c34aa56f46dbe8543cb5efe3131d7457e411b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Tue, 31 Oct 2023 12:07:26 +0100 Subject: [PATCH 32/75] added ligero benchmark file --- poly-commit/benches/ligero_ml_times.rs | 58 ++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 poly-commit/benches/ligero_ml_times.rs diff --git a/poly-commit/benches/ligero_ml_times.rs b/poly-commit/benches/ligero_ml_times.rs new file mode 100644 index 00000000..c59b688a --- /dev/null +++ b/poly-commit/benches/ligero_ml_times.rs @@ -0,0 +1,58 @@ +use ark_crypto_primitives::{ + crh::{sha256::Sha256, CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{ByteDigestConverter, Config}, + sponge::poseidon::PoseidonSponge, +}; +use ark_pcs_bench_templates::*; +use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; + +use ark_bn254::Fr; +use ark_ff::PrimeField; + +use ark_poly_commit::linear_codes::{LinearCodePCS, MultilinearLigero}; +use blake2::Blake2s256; +use rand_chacha::ChaCha20Rng; + +// Ligero PCS over BN254 +struct MerkleTreeParams; +type LeafH = LeafIdentityHasher; +type CompressH = Sha256; +impl Config 
for MerkleTreeParams { + type Leaf = Vec<u8>; + + type LeafDigest = <LeafH as CRHScheme>::Output; + type LeafInnerDigestConverter = ByteDigestConverter<Self::LeafDigest>; + type InnerDigest = <CompressH as TwoToOneCRHScheme>::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; +} + +pub type MLE<F> = DenseMultilinearExtension<F>; +type MTConfig = MerkleTreeParams; +type Sponge<F> = PoseidonSponge<F>; +type ColHasher<F> = FieldToBytesColHasher<F, Blake2s256>; +type Ligero<F> = LinearCodePCS< + MultilinearLigero<F, MTConfig, Sponge<F>, MLE<F>, ColHasher<F>>, + F, + MLE<F>, + Sponge<F>, + MTConfig, + ColHasher<F>, +>; + +fn rand_poly_ligero_ml<F: PrimeField>( + num_vars: usize, + rng: &mut ChaCha20Rng, +) -> DenseMultilinearExtension<F> { + DenseMultilinearExtension::rand(num_vars, rng) +} + +fn rand_point_ligero_ml<F: PrimeField>(num_vars: usize, rng: &mut ChaCha20Rng) -> Vec<F> { + (0..num_vars).map(|_| F::rand(rng)).collect() +} + +const MIN_NUM_VARS: usize = 10; +const MAX_NUM_VARS: usize = 20; + +bench!(Ligero<Fr>, rand_poly_ligero_ml, rand_point_ligero_ml); From 5bbc51980c8457eadeb821b1e63bcef3d637194f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Tue, 31 Oct 2023 14:34:33 +0100 Subject: [PATCH 33/75] adapted to new crate structure and created benchmark for ML brakedown --- Cargo.toml | 9 +- bench-templates/src/lib.rs | 98 ++++++++++++++---- poly-commit/Cargo.toml | 16 +++- poly-commit/benches/brakedown_ml_times.rs | 62 +++++++++++++ poly-commit/benches/{pcs.rs => ipa_times.rs} | 6 +- 5 files changed, 167 insertions(+), 24 deletions(-) create mode 100644 poly-commit/benches/brakedown_ml_times.rs rename poly-commit/benches/{pcs.rs => ipa_times.rs} (85%) diff --git a/Cargo.toml b/Cargo.toml index 1b3fc0de..617fd8a1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,13 +28,14 @@ incremental = true debug = true [patch.crates-io] -ark-ff = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} -ark-ec = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} -ark-poly = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} -ark-serialize = { git = "https://github.com/HungryCatsStudio/algebra", branch = "ml-is-poly-vec"} +ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } +ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } +ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } +ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 1594ee7c..92dbd07e 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -1,17 +1,25 @@ -use ark_crypto_primitives::sponge::{ - poseidon::{PoseidonConfig, PoseidonSponge}, - CryptographicSponge, +use ark_crypto_primitives::{ + crh::{sha256::digest::Digest, CRHScheme}, + sponge::{ + poseidon::{PoseidonConfig, PoseidonSponge}, + CryptographicSponge, + }, }; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_serialize::{CanonicalSerialize, Compress}; use ark_std::{test_rng, UniformRand}; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; +use rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaCha20Rng, +}; use core::time::Duration; -use std::time::Instant; +use std::{borrow::Borrow, marker::PhantomData, time::Instant}; -use 
ark_poly_commit::{challenge::ChallengeGenerator, LabeledPolynomial, PolynomialCommitment}; +use ark_poly_commit::{ + challenge::ChallengeGenerator, to_bytes, LabeledPolynomial, PolynomialCommitment, +}; pub use criterion::*; pub use paste::paste; @@ -30,8 +38,10 @@ pub fn bench_pcs_method< &PCS::VerifierKey, usize, fn(usize, &mut ChaCha20Rng) -> P, + fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) { let mut group = c.benchmark_group(msg); let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -44,7 +54,7 @@ pub fn bench_pcs_method< BenchmarkId::from_parameter(num_vars), &num_vars, |b, num_vars| { - b.iter(|| method(&ck, &vk, *num_vars, rand_poly)); + b.iter(|| method(&ck, &vk, *num_vars, rand_poly, rand_point)); }, ); } @@ -62,6 +72,7 @@ pub fn commit< _vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + _rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -102,12 +113,12 @@ pub fn open( _vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration where F: PrimeField, P: Polynomial, PCS: PolynomialCommitment>, - P::Point: UniformRand, { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -115,7 +126,7 @@ where LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - let point = P::Point::rand(rng); + let point = rand_point(num_vars, rng); let start = Instant::now(); let _ = PCS::open( @@ -173,12 +184,12 @@ pub fn verify( vk: &PCS::VerifierKey, num_vars: usize, rand_poly: fn(usize, &mut ChaCha20Rng) -> P, + rand_point: fn(usize, &mut ChaCha20Rng) -> P::Point, ) -> Duration where F: PrimeField, P: Polynomial, PCS: PolynomialCommitment>, - P::Point: UniformRand, { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); @@ -186,7 +197,7 @@ where LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - let point = P::Point::rand(rng); + let point = rand_point(num_vars, rng); let claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( &ck, @@ -243,7 +254,7 @@ fn test_sponge() -> PoseidonSponge { #[macro_export] macro_rules! bench_method { - ($c:expr, $method:ident, $scheme_type:ty, $rand_poly:ident) => { + ($c:expr, $method:ident, $scheme_type:ty, $rand_poly:ident, $rand_point:ident) => { let scheme_type_str = stringify!($scheme_type); let bench_name = format!("{} {}", stringify!($method), scheme_type_str); bench_pcs_method::<_, _, $scheme_type>( @@ -252,6 +263,7 @@ macro_rules! bench_method { &bench_name, $method::<_, _, $scheme_type>, $rand_poly::<_>, + $rand_point::<_>, ); }; } @@ -259,12 +271,12 @@ macro_rules! bench_method { #[macro_export] macro_rules! 
bench { ( - $scheme_type:ty, $rand_poly:ident + $scheme_type:ty, $rand_poly:ident, $rand_point:ident ) => { fn bench_pcs(c: &mut Criterion) { - bench_method!(c, commit, $scheme_type, $rand_poly); - bench_method!(c, open, $scheme_type, $rand_poly); - bench_method!(c, verify, $scheme_type, $rand_poly); + bench_method!(c, commit, $scheme_type, $rand_poly, $rand_point); + bench_method!(c, open, $scheme_type, $rand_poly, $rand_point); + bench_method!(c, verify, $scheme_type, $rand_poly, $rand_point); } criterion_group!(benches, bench_pcs); @@ -276,3 +288,57 @@ macro_rules! bench { } }; } + +/**** Auxiliary methods for linear-code-based PCSs ****/ + +/// Needed for benches and tests. +pub struct LeafIdentityHasher; + +impl CRHScheme for LeafIdentityHasher { + type Input = Vec; + type Output = Vec; + type Parameters = (); + + fn setup(_: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _: &Self::Parameters, + input: T, + ) -> Result { + Ok(input.borrow().to_vec().into()) + } +} + +/// Needed for benches and tests. +pub struct FieldToBytesColHasher +where + F: PrimeField + CanonicalSerialize, + D: Digest, +{ + _phantom: PhantomData<(F, D)>, +} + +impl CRHScheme for FieldToBytesColHasher +where + F: PrimeField + CanonicalSerialize, + D: Digest, +{ + type Input = Vec; + type Output = Vec; + type Parameters = (); + + fn setup(_rng: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _parameters: &Self::Parameters, + input: T, + ) -> Result { + let mut dig = D::new(); + dig.update(to_bytes!(input.borrow()).unwrap()); + Ok(dig.finalize().to_vec()) + } +} diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 19098ce0..16bfd95a 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -18,15 +18,21 @@ ark-std = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.13", default-features = false, optional = true } +hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } +merlin = { version = "3.0.0", default-features = false } [[bench]] -name = "pcs" -path = "benches/pcs.rs" +name = "ipa_times" +path = "benches/ipa_times.rs" +harness = false + +[[bench]] +name = "brakedown_times" +path = "benches/brakedown_ml_times.rs" harness = false [[bench]] @@ -38,10 +44,14 @@ harness = false ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } +ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } blake2 = { version = "0.10", default-features = false } rand_chacha = { version = "0.3.0", default-features = false } ark-pcs-bench-templates = { path = "../bench-templates" } +[target.'cfg(target_arch = "aarch64")'.dependencies] +num-traits = { version = "0.2", default-features = false, features = ["libm"] } + [features] default = [ "std", "parallel" ] std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] diff --git a/poly-commit/benches/brakedown_ml_times.rs b/poly-commit/benches/brakedown_ml_times.rs new file mode 100644 index 00000000..860d0a99 --- /dev/null +++ 
b/poly-commit/benches/brakedown_ml_times.rs @@ -0,0 +1,62 @@ +use ark_crypto_primitives::{ + crh::{sha256::Sha256, CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{ByteDigestConverter, Config}, + sponge::poseidon::PoseidonSponge, +}; +use ark_pcs_bench_templates::*; +use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; + +use ark_bn254::Fr; +use ark_ff::PrimeField; + +use ark_poly_commit::linear_codes::{LinearCodePCS, MultilinearBrakedown}; +use blake2::Blake2s256; +use rand_chacha::ChaCha20Rng; + +// Brakedown PCS over BN254 +struct MerkleTreeParams; +type LeafH = LeafIdentityHasher; +type CompressH = Sha256; +impl Config for MerkleTreeParams { + type Leaf = Vec<u8>; + + type LeafDigest = <LeafH as CRHScheme>::Output; + type LeafInnerDigestConverter = ByteDigestConverter<Self::LeafDigest>; + type InnerDigest = <CompressH as TwoToOneCRHScheme>::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; +} + +pub type MLE<F> = DenseMultilinearExtension<F>; +type MTConfig = MerkleTreeParams; +type Sponge<F> = PoseidonSponge<F>; +type ColHasher<F> = FieldToBytesColHasher<F, Blake2s256>; +type Brakedown<F> = LinearCodePCS< + MultilinearBrakedown<F, MTConfig, Sponge<F>, MLE<F>, ColHasher<F>>, + F, + MLE<F>, + Sponge<F>, + MTConfig, + ColHasher<F>, +>; + +fn rand_poly_brakedown_ml<F: PrimeField>( + num_vars: usize, + rng: &mut ChaCha20Rng, +) -> DenseMultilinearExtension<F> { + DenseMultilinearExtension::rand(num_vars, rng) +} + +fn rand_point_brakedown_ml<F: PrimeField>(num_vars: usize, rng: &mut ChaCha20Rng) -> Vec<F> { + (0..num_vars).map(|_| F::rand(rng)).collect() +} + +const MIN_NUM_VARS: usize = 10; +const MAX_NUM_VARS: usize = 20; + +bench!( + Brakedown<Fr>, + rand_poly_brakedown_ml, + rand_point_brakedown_ml +); diff --git a/poly-commit/benches/pcs.rs b/poly-commit/benches/ipa_times.rs similarity index 85% rename from poly-commit/benches/pcs.rs rename to poly-commit/benches/ipa_times.rs index 77ab04f7..27b4d3ba 100644 --- a/poly-commit/benches/pcs.rs +++ b/poly-commit/benches/ipa_times.rs @@ -22,7 +22,11 @@ fn rand_poly_ipa_pc<F: PrimeField>(degree: usize, rng: &mut ChaCha20Rng) -> Dens DenseUnivariatePoly::rand(degree, rng) } +fn rand_point_ipa_pc<F: PrimeField>(_: usize, rng: &mut ChaCha20Rng) -> F { + F::rand(rng) +} + const MIN_NUM_VARS: usize = 10; const MAX_NUM_VARS: usize = 20; -bench!(IPA_JubJub, rand_poly_ipa_pc); +bench!(IPA_JubJub, rand_poly_ipa_pc, rand_point_ipa_pc); From 3ef730907c26857c9ec6742bbf7bf379b12c4b98 Mon Sep 17 00:00:00 2001 From: mmagician Date: Mon, 13 Nov 2023 10:40:33 +0100 Subject: [PATCH 34/75] Hyrax fix bench (#42) * fix bench call * set num vars from 12-20 --- bench-templates/src/lib.rs | 8 +++++++- poly-commit/benches/hyrax_times.rs | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 31f78931..552f5924 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -46,7 +46,13 @@ pub fn bench_pcs_method< BenchmarkId::from_parameter(num_vars), &num_vars, |b, num_vars| { - b.iter(|| method(&ck, &vk, *num_vars, rand_poly, rand_point)); + b.iter_custom(|i| { + let mut time = Duration::from_nanos(0); + for _ in 0..i { + time += method(&ck, &vk, *num_vars, rand_poly, rand_point); + } + time + }); }, ); } diff --git a/poly-commit/benches/hyrax_times.rs b/poly-commit/benches/hyrax_times.rs index 60c5a057..7f579cab 100644 --- a/poly-commit/benches/hyrax_times.rs +++ b/poly-commit/benches/hyrax_times.rs @@ -21,7 +21,7 @@ fn rand_point_hyrax<F: PrimeField>(num_vars: usize, rng: &mut ChaCha20Rng) -> Ve (0..num_vars).map(|_| F::rand(rng)).collect() } -const MIN_NUM_VARS: usize = 10; -const MAX_NUM_VARS: usize = 20; +const MIN_NUM_VARS: usize = 12; +const MAX_NUM_VARS: usize = 22; 
bench!(Hyrax254, rand_poly_hyrax, rand_point_hyrax); From 61ff14ae30bf3f434378ea28dad8e89bd00ebd88 Mon Sep 17 00:00:00 2001 From: mmagician Date: Mon, 13 Nov 2023 10:40:36 +0100 Subject: [PATCH 35/75] Brakedown fix bench (#41) * fix bench call * set num vars from 12-20 --- bench-templates/src/lib.rs | 8 +++++++- poly-commit/benches/brakedown_ml_times.rs | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 92dbd07e..9500eb9c 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -54,7 +54,13 @@ pub fn bench_pcs_method< BenchmarkId::from_parameter(num_vars), &num_vars, |b, num_vars| { - b.iter(|| method(&ck, &vk, *num_vars, rand_poly, rand_point)); + b.iter_custom(|i| { + let mut time = Duration::from_nanos(0); + for _ in 0..i { + time += method(&ck, &vk, *num_vars, rand_poly, rand_point); + } + time + }); }, ); } diff --git a/poly-commit/benches/brakedown_ml_times.rs b/poly-commit/benches/brakedown_ml_times.rs index 860d0a99..55ebfa7f 100644 --- a/poly-commit/benches/brakedown_ml_times.rs +++ b/poly-commit/benches/brakedown_ml_times.rs @@ -52,8 +52,8 @@ fn rand_point_brakedown_ml(num_vars: usize, rng: &mut ChaCha20Rng (0..num_vars).map(|_| F::rand(rng)).collect() } -const MIN_NUM_VARS: usize = 10; -const MAX_NUM_VARS: usize = 20; +const MIN_NUM_VARS: usize = 12; +const MAX_NUM_VARS: usize = 22; bench!( Brakedown, From 370e77ca2e8414cee086b2c7fc2e88a6080268e4 Mon Sep 17 00:00:00 2001 From: mmagician Date: Mon, 13 Nov 2023 10:40:39 +0100 Subject: [PATCH 36/75] Ligero fix benches (#40) * fix bench call * set num vars from 12-20 --- bench-templates/src/lib.rs | 8 +++++++- poly-commit/benches/ligero_ml_times.rs | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 92dbd07e..9500eb9c 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -54,7 +54,13 @@ pub fn bench_pcs_method< BenchmarkId::from_parameter(num_vars), &num_vars, |b, num_vars| { - b.iter(|| method(&ck, &vk, *num_vars, rand_poly, rand_point)); + b.iter_custom(|i| { + let mut time = Duration::from_nanos(0); + for _ in 0..i { + time += method(&ck, &vk, *num_vars, rand_poly, rand_point); + } + time + }); }, ); } diff --git a/poly-commit/benches/ligero_ml_times.rs b/poly-commit/benches/ligero_ml_times.rs index c59b688a..cf723704 100644 --- a/poly-commit/benches/ligero_ml_times.rs +++ b/poly-commit/benches/ligero_ml_times.rs @@ -52,7 +52,7 @@ fn rand_point_ligero_ml(num_vars: usize, rng: &mut ChaCha20Rng) - (0..num_vars).map(|_| F::rand(rng)).collect() } -const MIN_NUM_VARS: usize = 10; -const MAX_NUM_VARS: usize = 20; +const MIN_NUM_VARS: usize = 12; +const MAX_NUM_VARS: usize = 22; bench!(Ligero, rand_poly_ligero_ml, rand_point_ligero_ml); From cc1f75ae650234f994863d20dbd6cc5ba8112dac Mon Sep 17 00:00:00 2001 From: mmagician Date: Mon, 13 Nov 2023 11:21:21 +0100 Subject: [PATCH 37/75] Hyrax parallel `commit` (#39) * Enable parallel commitment in hyrax amend * make `rand` optional * remove dead code --- poly-commit/Cargo.toml | 4 ++-- poly-commit/src/hyrax/mod.rs | 17 +++++++++-------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 1c2a6347..57c68b85 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -17,7 +17,7 @@ ark-crypto-primitives = {version = "^0.4.0", default-features = false, features ark-std = { version = "^0.4.0", default-features = false } 
blake2 = { version = "0.10", default-features = false } merlin = { version = "3.0.0", default-features = false } - +rand = { version = "0.8.0", optional = true } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } @@ -55,4 +55,4 @@ default = [ "std", "parallel" ] std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] print-trace = [ "ark-std/print-trace" ] -parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon" ] +parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon", "rand" ] diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 887fa255..6ce4a444 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -231,6 +231,7 @@ impl> /// /// Panics if `rng` is None, since Hyrax requires randomness in order to /// commit to a polynomial + #[allow(unused_variables)] fn commit<'a>( ck: &Self::CommitterKey, polynomials: impl IntoIterator>, @@ -248,11 +249,10 @@ impl> let mut coms = Vec::new(); let mut rands = Vec::new(); + #[cfg(not(feature = "parallel"))] let rng_inner = rng.expect("Committing to polynomials requires a random generator"); for l_poly in polynomials { - let mut com_rands = Vec::new(); - let label = l_poly.label(); let poly = l_poly.polynomial(); @@ -272,15 +272,16 @@ impl> let m = flat_to_matrix_column_major(&poly.to_evaluations(), dim, dim); // Commiting to the matrix with one multi-commitment per row - let row_coms = m - .iter() + let (row_coms, com_rands): (Vec<_>, Vec<_>) = cfg_iter!(m) .map(|row| { + #[cfg(not(feature = "parallel"))] let (c, r) = Self::pedersen_commit(ck, row, None, Some(rng_inner)); - // Storing the randomness used in the commitment - com_rands.push(r); - c + #[cfg(feature = "parallel")] + let (c, r) = + Self::pedersen_commit(ck, row, None, Some(&mut rand::thread_rng())); + (c, r) }) - .collect(); + .unzip(); let com = HyraxCommitment { row_coms }; let l_comm = LabeledCommitment::new(label.to_string(), com, Some(1)); From 7c7328d3335718baed9a4c1c4a86e71a05580e0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 13 Nov 2023 12:39:40 +0100 Subject: [PATCH 38/75] Make Hyrax hiding again (#43) * removed evaluation randomness from proof and ignored claimed value in check to make scheme hiding * fmt * removed unnecessary usage of argument in check, added _ --- poly-commit/src/hyrax/data_structures.rs | 4 --- poly-commit/src/hyrax/mod.rs | 39 ++---------------------- 2 files changed, 2 insertions(+), 41 deletions(-) diff --git a/poly-commit/src/hyrax/data_structures.rs b/poly-commit/src/hyrax/data_structures.rs index 57a068d7..fbdd69a9 100644 --- a/poly-commit/src/hyrax/data_structures.rs +++ b/poly-commit/src/hyrax/data_structures.rs @@ -111,8 +111,4 @@ pub struct HyraxProof { pub z_d: G::ScalarField, /// Auxiliary random scalar pub z_b: G::ScalarField, - /// The hiding scalar r_eval is not part of a Hyrax PCS proof as described - /// in the reference article. Cf. 
the "Modification note" at the beginning - /// of `mod.rs` - pub r_eval: G::ScalarField, } diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 6ce4a444..67937470 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -37,25 +37,8 @@ pub const PROTOCOL_NAME: &'static [u8] = b"Hyrax protocol"; /// /// [hyrax]: https://eprint.iacr.org/2017/1132.pdf /// -/// ### Modification note -/// -/// In the PCS contained in the cited article, the verifier never learns the -/// actual evaluation of the polynomial at the requested point, but is instead -/// convinced that a previously received Pedersen commitment is indeed a -/// commitment to said evaluation - this is what the SNARK proposed therein -/// necessitates. However, the Arkworks framework requies the verifier to -/// actually learn that value, which is why we have added the opening of -/// the commitment at the end of the protocol. This likely does not result in -/// an optimal non-hiding PCS, but we feel it is the most faithful adaptation -/// of the original PCS that can be implemented with the current restrictions. -/// /// ### Future optimisations /// -/// - Deal with the modification described above: either modify the PCS trait -/// to encompass hiding PCSs (in terms of the actual evaluation, not only -/// the polynomial), or turn this scheme into a non-hiding one by removing -/// unnecessary work (which would probably involve non-trivial theoretical -/// work). /// - Add parallelisation. There is at least one natural place where /// parallelisation could bring performance gains: in essence, the prover /// commits to the polynomial by expressing it as an evaluation matrix and @@ -437,12 +420,6 @@ impl> let z_d = c * r_lt + r_d; let z_b = c * r_eval + r_b; - // ******** Opening ******** - // This is *not* part of the Hyrax PCS as described in the reference - // article. Cf. the "Modification note" at the beginning of this file. - // From the prover's perspective, opening amounts to adding r_eval to - // the proof. 
- proofs.push(HyraxProof { com_eval, com_d, @@ -450,7 +427,6 @@ impl> z, z_d, z_b, - r_eval, }); } @@ -472,7 +448,7 @@ impl> vk: &Self::VerifierKey, commitments: impl IntoIterator>, point: &'a P::Point, - values: impl IntoIterator, + _values: impl IntoIterator, proof: &Self::Proof, // Not used and not generic on the cryptographic sponge S _opening_challenges: &mut ChallengeGenerator< @@ -504,10 +480,7 @@ impl> let l = tensor_prime(point_lower); let r = tensor_prime(point_upper); - for (com, (claim, h_proof)) in commitments - .into_iter() - .zip(values.into_iter().zip(proof.iter())) - { + for (com, h_proof) in commitments.into_iter().zip(proof.iter()) { let row_coms = &com.commitment().row_coms; // extract each field from h_proof @@ -518,7 +491,6 @@ impl> z, z_d, z_b, - r_eval, } = h_proof; if row_coms.len() != 1 << n / 2 { @@ -569,13 +541,6 @@ impl> if com_dp != (com_eval.mul(c) + com_b).into() { return Ok(false); } - - // Third check: opening - let exp = Self::pedersen_commit(vk, &[claim], Some(*r_eval), None).0; - - if *com_eval != exp { - return Ok(false); - } } Ok(true) From d402d538f5c46ee186a9e15fe42291ef9df22bf1 Mon Sep 17 00:00:00 2001 From: mmagician Date: Tue, 14 Nov 2023 12:43:43 +0100 Subject: [PATCH 39/75] remove cfg(benches) attributes as that feature is no longer used --- poly-commit/src/linear_codes/mod.rs | 2 -- poly-commit/src/linear_codes/utils.rs | 16 +++++++--------- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index e6628dd8..99c61b1e 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -29,8 +29,6 @@ mod data_structures; use data_structures::*; pub use data_structures::LinCodePCProof; -#[cfg(any(feature = "benches", test))] -pub use utils::{FieldToBytesColHasher, LeafIdentityHasher}; use utils::{calculate_t, get_indices_from_transcript, hash_column}; diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 2d26feda..941c85e4 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -13,7 +13,7 @@ use ark_std::vec::Vec; #[cfg(not(feature = "std"))] use num_traits::Float; -#[cfg(any(feature = "benches", test))] +#[cfg(test)] use { crate::to_bytes, ark_std::{marker::PhantomData, rand::RngCore}, @@ -182,11 +182,10 @@ pub(crate) fn calculate_t( Ok(if t < codeword_len { t } else { codeword_len }) } -/// Only needed for benches and tests. -#[cfg(any(feature = "benches", test))] -pub struct LeafIdentityHasher; +#[cfg(test)] +pub(crate) struct LeafIdentityHasher; -#[cfg(any(feature = "benches", test))] +#[cfg(test)] impl CRHScheme for LeafIdentityHasher { type Input = Vec; type Output = Vec; @@ -204,9 +203,8 @@ impl CRHScheme for LeafIdentityHasher { } } -/// Only needed for benches and tests. 
-#[cfg(any(feature = "benches", test))] -pub struct FieldToBytesColHasher +#[cfg(test)] +pub(crate) struct FieldToBytesColHasher where F: PrimeField + CanonicalSerialize, D: Digest, @@ -214,7 +212,7 @@ where _phantom: PhantomData<(F, D)>, } -#[cfg(any(feature = "benches", test))] +#[cfg(test)] impl CRHScheme for FieldToBytesColHasher where F: PrimeField + CanonicalSerialize, From 2bcff80825705e6528a7f864c9451b7aacc82d57 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Tue, 9 Jan 2024 21:44:49 +0100 Subject: [PATCH 40/75] Fix tests: sponge config for univariate ligero --- .../src/linear_codes/univariate_ligero/tests.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poly-commit/src/linear_codes/univariate_ligero/tests.rs b/poly-commit/src/linear_codes/univariate_ligero/tests.rs index da0ec6fb..c98c09ec 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/tests.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/tests.rs @@ -42,22 +42,22 @@ mod tests { } type MTConfig = MerkleTreeParams; - type Sponge = PoseidonSponge; + type Sponge = PoseidonSponge; type LigeroPCS = LinearCodePCS< - UnivariateLigero, ColHasher>, + UnivariateLigero, DensePolynomial, ColHasher>, Fr, DensePolynomial, - Sponge, + Sponge, MTConfig, ColHasher, >; type LigeroPcsF = LinearCodePCS< - UnivariateLigero, ColHasher>, + UnivariateLigero, DensePolynomial, ColHasher>, F, DensePolynomial, - Sponge, + Sponge, MTConfig, ColHasher, >; From 4f6005d870fa4ed3182ea65d1d60404d9f0207b1 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Mon, 15 Jan 2024 17:13:53 +0100 Subject: [PATCH 41/75] Fix the comment Co-authored-by: Marcin --- poly-commit/src/linear_codes/multilinear_brakedown/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs b/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs index 7d195ae5..27cfc2a6 100644 --- a/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs +++ b/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs @@ -13,7 +13,7 @@ use ark_std::vec::Vec; mod tests; -/// The univariate Brakedown polynomial commitment scheme based on [[Brakedown]][bd]. +/// The multilinear Brakedown polynomial commitment scheme based on [[Brakedown]][bd]. /// The scheme defaults to the naive batching strategy. /// /// Note: The scheme currently does not support hiding. 
From 2011982e544d14edb1d91fb11ee1a3f8b215b01f Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Wed, 17 Jan 2024 13:36:50 +0100 Subject: [PATCH 42/75] Delete `IOPTranscript`, update with master (#44) (aka Brakedown++) * Add the trait bounds * Add `CommitmentState` * Update benches for the new type * Fix the name of local variable * Merge `PCCommitmentState` with `PCRandomness` * Update `README.md` * Fix a bug * Complete the merge * Simplify `hash_column` * Delete comments * Add `CommitmentState` * Make `fmt` happy * Refactor, remove `hash_columns` * Rename all params * remove cfg(benches) attributes as that feature is no longer used * Brakedown+++ (#46) * conversion to `into_iter` is a no-op * remove explicit casts to vecs * rename to use singular of `labeled_commitment` * simplify the iterators even further by zipping two iters * Apply suggestions from code review * Maybe `empty` not return `Self` * Make `empty` return `Self` * Rename `rand` to `state` * Add the type `Randomness` * Rename nonnative to emulated, as in `r1cs-std` (#137) * Rename nonnative to emulated, as in `r1cs-std` * Run `fmt` * Temporarily change `Cargo.toml` * Revert `Cargo.toml` * Refactor `FoldedPolynomialStream` partially * Substitute `ChallengeGenerator` by the generic sponge (#139) * Rename nonnative to emulated, as in `r1cs-std` * Run `fmt` * Temporarily change `Cargo.toml` * Substitute `ChallengeGenerator` with the generic sponge * Run `fmt` * Remove the extra file * Update modules * Delete the unnecessary loop * Revert `Cargo.toml` * Refactor `FoldedPolynomialStream` partially * Update README * Make the diff more readable * Bring the whitespace back * Make diff more readable, 2 * Fix according to breaking changes in `ark-ec` (#141) * Fix for KZG10 * Fix the breaking changes in `ark-ec` * Remove the extra loop * Fix the loop range * re-use the preprocessing table * also re-use the preprocessing table for multilinear_pc --------- Co-authored-by: mmagician * Auxiliary opening data (#134) * Add the trait bounds * Add `CommitmentState` * Update benches for the new type * Fix the name of local variable * Merge `PCCommitmentState` with `PCRandomness` * Update `README.md` * Fix a bug * Put `Randomness` in `CommitmentState` * Add a comment * Remove the extra loop * Update the comment for `CommitmentState` Co-authored-by: Marcin * cargo fmt --------- Co-authored-by: Marcin * `batch_mul_with_preprocessing` no longer takes `self` as argument (#142) * batch_mul_with_preprocessing no longer takes `self` as argument * Apply suggestions from code review Co-authored-by: Pratyush Mishra * fix variable name --------- Co-authored-by: Pratyush Mishra * Remove `ChallengeGenerator` for Brakedown (#53) * Squash and merge `delete-chalgen` onto here * Fix Brakedown for `ChallengeGenerator` and `AsRef` for Merkle tree * Remove `IOPTranscript` (#52) * Replace the `IOPTranscript` with `CryptographicSponge` * Delete extra comments * Delete TODOs and do not absorb what you just squeezed * Remove the extra loop * Revert the incorrect changes in `bench-tamplates` --------- Co-authored-by: mmagician Co-authored-by: Pratyush Mishra --- README.md | 16 +- bench-templates/src/lib.rs | 24 +- poly-commit/src/challenge.rs | 61 -- poly-commit/src/constraints.rs | 20 +- poly-commit/src/data_structures.rs | 12 +- poly-commit/src/ipa_pc/data_structures.rs | 3 +- poly-commit/src/ipa_pc/mod.rs | 89 +-- poly-commit/src/kzg10/data_structures.rs | 3 +- poly-commit/src/kzg10/mod.rs | 49 +- poly-commit/src/lib.rs | 723 +++++++++--------- 
poly-commit/src/linear_codes/brakedown.rs | 26 +- .../src/linear_codes/data_structures.rs | 27 +- poly-commit/src/linear_codes/mod.rs | 288 +++---- .../linear_codes/multilinear_brakedown/mod.rs | 8 +- .../multilinear_brakedown/tests.rs | 20 +- poly-commit/src/linear_codes/utils.rs | 35 +- .../src/marlin/marlin_pc/data_structures.rs | 7 +- poly-commit/src/marlin/marlin_pc/mod.rs | 65 +- .../marlin/marlin_pst13_pc/data_structures.rs | 5 +- poly-commit/src/marlin/marlin_pst13_pc/mod.rs | 73 +- poly-commit/src/marlin/mod.rs | 41 +- poly-commit/src/multilinear_pc/mod.rs | 48 +- poly-commit/src/sonic_pc/mod.rs | 65 +- .../src/streaming_kzg/data_structures.rs | 9 +- poly-commit/src/streaming_kzg/time.rs | 10 +- poly-commit/src/utils.rs | 94 +-- 26 files changed, 767 insertions(+), 1054 deletions(-) delete mode 100644 poly-commit/src/challenge.rs diff --git a/README.md b/README.md index 7475e9c1..68d6d557 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ This trait defines the interface for a polynomial commitment scheme. It is recom // In this example, we will commit to a single polynomial, open it first at one point, and then batched at two points, and finally verify the proofs. // We will use the KZG10 polynomial commitment scheme, following the approach from Marlin. -use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations, challenge::ChallengeGenerator}; +use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations}; use ark_bls12_377::Bls12_377; use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; use ark_crypto_primitives::sponge::CryptographicSponge; @@ -128,17 +128,15 @@ let (ck, vk) = PCS::trim(&pp, degree, 2, Some(&[degree])).unwrap(); // 3. PolynomialCommitment::commit // The prover commits to the polynomial using their committer key `ck`. -let (comms, rands) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - -let challenge_generator: ChallengeGenerator<::ScalarField, Sponge_Bls12_377> = ChallengeGenerator::new_univariate(&mut test_sponge); +let (comms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); // 4a. PolynomialCommitment::open // Opening proof at a single point. -let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (challenge_generator.clone()), &rands, None).unwrap(); +let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (test_sponge.clone()), &states, None).unwrap(); // 5a. PolynomialCommitment::check // Verifying the proof at a single point, given the commitment, the point, the claimed evaluation, and the proof. 
-assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (challenge_generator.clone()), Some(rng)).unwrap()); +assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (test_sponge.clone()), Some(rng)).unwrap()); let mut query_set = QuerySet::new(); let mut values = Evaluations::new(); @@ -155,8 +153,8 @@ let proof_batched = PCS::batch_open( [&labeled_poly], &comms, &query_set, - &mut (challenge_generator.clone()), - &rands, + &mut (test_sponge.clone()), + &states, Some(rng), ).unwrap(); @@ -167,7 +165,7 @@ assert!(PCS::batch_check( &query_set, &values, &proof_batched, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), rng, ).unwrap()); ``` diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 9500eb9c..c3211c7f 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -17,9 +17,7 @@ use rand_chacha::{ use core::time::Duration; use std::{borrow::Borrow, marker::PhantomData, time::Instant}; -use ark_poly_commit::{ - challenge::ChallengeGenerator, to_bytes, LabeledPolynomial, PolynomialCommitment, -}; +use ark_poly_commit::{to_bytes, LabeledPolynomial, PolynomialCommitment}; pub use criterion::*; pub use paste::paste; @@ -131,7 +129,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); let start = Instant::now(); @@ -140,8 +138,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -165,7 +163,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = P::Point::rand(rng); let proofs = PCS::open( @@ -173,8 +171,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -202,7 +200,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); let claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( @@ -210,8 +208,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -223,7 +221,7 @@ where &point, [claimed_eval], &proof, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), None, ) .unwrap(); diff --git a/poly-commit/src/challenge.rs b/poly-commit/src/challenge.rs deleted file mode 100644 index 23b3c9d1..00000000 --- a/poly-commit/src/challenge.rs +++ /dev/null @@ -1,61 +0,0 @@ -use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; -use ark_ff::PrimeField; - -/// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. 
-/// For multivariate strategy, each challenge is freshly squeezed from a sponge. -/// For univariate strategy, each challenge is a power of one squeezed element from sponge. -/// -/// Note that a mutable reference cannot be cloned. -#[derive(Clone)] -pub enum ChallengeGenerator { - /// Each challenge is freshly squeezed from a sponge. - Multivariate(S), - /// Each challenge is a power of one squeezed element from sponge. - /// - /// `Univariate(generator, next_element)` - Univariate(F, F), -} - -impl ChallengeGenerator { - /// Returns a challenge generator with multivariate strategy. Each challenge is freshly squeezed - /// from a sponge. - pub fn new_multivariate(sponge: S) -> Self { - Self::Multivariate(sponge) - } - - /// Returns a challenge generator with univariate strategy. Each challenge is a power of one - /// squeezed element from sponge. - pub fn new_univariate(sponge: &mut S) -> Self { - let gen = sponge.squeeze_field_elements(1)[0]; - Self::Univariate(gen, gen) - } - - /// Returns a challenge of size `size`. - /// * If `self == Self::Multivariate(...)`, then this squeezes out a challenge of size `size`. - /// * If `self == Self::Univariate(...)`, then this ignores the `size` argument and simply squeezes out - /// the next field element. - pub fn try_next_challenge_of_size(&mut self, size: FieldElementSize) -> F { - match self { - // multivariate (full) - Self::Multivariate(sponge) => sponge.squeeze_field_elements_with_sizes(&[size])[0], - // univariate - Self::Univariate(gen, next) => { - let result = next.clone(); - *next *= *gen; - result - } - } - } - /// Returns the next challenge generated. - pub fn next_challenge(&mut self) -> F { - self.try_next_challenge_of_size(FieldElementSize::Full) - } - - /// Returns the sponge state if `self` is multivariate. Returns `None` otherwise. - pub fn into_sponge(self) -> Option { - match self { - Self::Multivariate(s) => Some(s), - _ => None, - } - } -} diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index e6fb5d4f..1300509a 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -5,7 +5,7 @@ use crate::{ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::Polynomial; -use ark_r1cs_std::fields::nonnative::NonNativeFieldVar; +use ark_r1cs_std::fields::emulated_fp::EmulatedFpVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; @@ -24,8 +24,8 @@ pub enum LinearCombinationCoeffVar), + /// Other coefficient, represented as an "emulated" field element. + Var(EmulatedFpVar), } /// An allocated version of `LinearCombination`. @@ -60,7 +60,7 @@ impl let (f, lc_term) = term; let fg = - NonNativeFieldVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) + EmulatedFpVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) .unwrap(); (LinearCombinationCoeffVar::Var(fg), lc_term.clone()) @@ -79,12 +79,12 @@ impl pub struct PCCheckRandomDataVar { /// Opening challenges. /// The prover and the verifier MUST use the same opening challenges. - pub opening_challenges: Vec>, + pub opening_challenges: Vec>, /// Bit representations of the opening challenges. pub opening_challenges_bits: Vec>>, /// Batching random numbers. /// The verifier can choose these numbers freely, as long as they are random.
- pub batching_rands: Vec>, + pub batching_rands: Vec>, /// Bit representations of the batching random numbers. pub batching_rands_bits: Vec>>, } @@ -172,7 +172,7 @@ pub struct LabeledPointVar { /// MUST be a unique identifier in a query set. pub name: String, /// The point value. - pub value: NonNativeFieldVar, + pub value: EmulatedFpVar, } /// An allocated version of `QuerySet`. #[derive(Clone)] pub struct QuerySetVar( /// An allocated version of `Evaluations`. #[derive(Clone)] pub struct EvaluationsVar( pub HashMap, NonNativeFieldVar>, ); impl EvaluationsVar { @@ -192,8 +192,8 @@ impl EvaluationsVar, - ) -> Result, SynthesisError> { + point: &EmulatedFpVar, + ) -> Result, SynthesisError> { let key = LabeledPointVar:: { name: String::from(lc_string), value: point.clone(), diff --git a/poly-commit/src/data_structures.rs b/poly-commit/src/data_structures.rs index 4a5eec21..2b942ee1 100644 --- a/poly-commit/src/data_structures.rs +++ b/poly-commit/src/data_structures.rs @@ -70,9 +70,12 @@ pub trait PCPreparedCommitment: Clone { fn prepare(comm: &UNPREPARED) -> Self; } -/// Defines the minimal interface of commitment randomness for any polynomial -/// commitment scheme. -pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { +/// Defines the minimal interface of commitment state for any polynomial +/// commitment scheme. It may include, for example, the commitment randomness. +pub trait PCCommitmentState: Clone + CanonicalSerialize + CanonicalDeserialize { + /// The type of `Randomness` that the `rand` method returns. + type Randomness: Clone + CanonicalSerialize + CanonicalDeserialize; + /// Outputs empty randomness that does not hide the commitment. fn empty() -> Self; @@ -86,9 +89,8 @@ pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { has_degree_bound: bool, num_vars: Option, rng: &mut R, - ) -> Self; + ) -> Self::Randomness; } - /// A proof of satisfaction of linear combinations.
#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] pub struct BatchLCProof { diff --git a/poly-commit/src/ipa_pc/data_structures.rs b/poly-commit/src/ipa_pc/data_structures.rs index 7ba56c95..84fcb7f2 100644 --- a/poly-commit/src/ipa_pc/data_structures.rs +++ b/poly-commit/src/ipa_pc/data_structures.rs @@ -146,7 +146,8 @@ pub struct Randomness { pub shifted_rand: Option, } -impl PCRandomness for Randomness { +impl PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: G::ScalarField::zero(), diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index 25752d78..43a40852 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -1,7 +1,7 @@ use crate::{BTreeMap, BTreeSet, String, ToString, Vec, CHALLENGE_SIZE}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCCommitterKey, PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCCommitterKey, PCUniversalParams, PolynomialCommitment}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; @@ -15,7 +15,6 @@ pub use data_structures::*; #[cfg(feature = "parallel")] use rayon::prelude::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; use digest::Digest; @@ -105,7 +104,7 @@ where point: G::ScalarField, values: impl IntoIterator, proof: &Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, ) -> Option> { let check_time = start_timer!(|| "Succinct checking"); @@ -117,7 +116,8 @@ where let mut combined_commitment_proj = G::Group::zero(); let mut combined_v = G::ScalarField::zero(); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge: G::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let labeled_commitments = commitments.into_iter(); let values = values.into_iter(); @@ -126,7 +126,7 @@ where let commitment = labeled_commitment.commitment(); combined_v += &(cur_challenge * &value); combined_commitment_proj += &labeled_commitment.commitment().comm.mul(cur_challenge); - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let degree_bound = labeled_commitment.degree_bound(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); @@ -137,7 +137,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let mut combined_commitment = combined_commitment_proj.into_affine(); @@ -347,7 +347,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -418,7 +418,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -427,7 +427,7 @@ where { let rng = &mut crate::optional_rng::OptionalRng(rng); let mut comms = Vec::new(); - let mut rands = Vec::new(); + let mut states = Vec::new(); let commit_time = start_timer!(|| "Committing to polynomials"); for labeled_polynomial in polynomials { @@ -446,7 +446,7 
@@ where hiding_bound, )); - let randomness = if let Some(h) = hiding_bound { + let state = if let Some(h) = hiding_bound { Randomness::rand(h, degree_bound.is_some(), None, rng) } else { Randomness::empty() @@ -456,7 +456,7 @@ where &ck.comm_key[..(polynomial.degree() + 1)], &polynomial.coeffs(), Some(ck.s), - Some(randomness.rand), + Some(state.rand), ) .into(); @@ -465,7 +465,7 @@ where &ck.comm_key[(ck.supported_degree() - d)..], &polynomial.coeffs(), Some(ck.s), - randomness.shifted_rand, + state.shifted_rand, ) .into() }); @@ -474,13 +474,13 @@ where let labeled_comm = LabeledCommitment::new(label.to_string(), commitment, degree_bound); comms.push(labeled_comm); - rands.push(randomness); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((comms, rands)) + Ok((comms, states)) } fn open<'a>( @@ -488,13 +488,13 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, P: 'a, { let mut combined_polynomial = P::zero(); @@ -504,15 +504,15 @@ where let mut has_hiding = false; let polys_iter = labeled_polynomials.into_iter(); - let rands_iter = rands.into_iter(); + let states_iter = states.into_iter(); let comms_iter = commitments.into_iter(); let combine_time = start_timer!(|| "Combining polynomials, randomness, and commitments."); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (labeled_polynomial, (labeled_commitment, randomness)) in - polys_iter.zip(comms_iter.zip(rands_iter)) + for (labeled_polynomial, (labeled_commitment, state)) in + polys_iter.zip(comms_iter.zip(states_iter)) { let label = labeled_polynomial.label(); assert_eq!(labeled_polynomial.label(), labeled_commitment.label()); @@ -528,10 +528,10 @@ where if hiding_bound.is_some() { has_hiding = true; - combined_rand += &(cur_challenge * &randomness.rand); + combined_rand += &(cur_challenge * &state.rand); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let has_degree_bound = degree_bound.is_some(); @@ -554,7 +554,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); if hiding_bound.is_some() { - let shifted_rand = randomness.shifted_rand; + let shifted_rand = state.shifted_rand; assert!( shifted_rand.is_some(), "shifted_rand.is_none() for {}", @@ -564,7 +564,7 @@ where } } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } end_timer!(combine_time); @@ -739,7 +739,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -762,8 +762,7 @@ where )); } - let check_poly = - Self::succinct_check(vk, commitments, *point, values, proof, opening_challenges); + let check_poly = Self::succinct_check(vk, commitments, *point, values, proof, sponge); if check_poly.is_none() { return Ok(false); @@ -790,7 +789,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - 
opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -833,14 +832,8 @@ where vals.push(*v_i); } - let check_poly = Self::succinct_check( - vk, - comms.into_iter(), - *point, - vals.into_iter(), - p, - opening_challenges, - ); + let check_poly = + Self::succinct_check(vk, comms.into_iter(), *point, vals.into_iter(), p, sponge); if check_poly.is_none() { return Ok(false); @@ -876,24 +869,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_poly_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -951,7 +944,7 @@ where let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(Randomness { + lc_states.push(Randomness { rand: combined_rand, shifted_rand: combined_shifted_rand, }); @@ -971,8 +964,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -987,7 +980,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -1060,7 +1053,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/kzg10/data_structures.rs b/poly-commit/src/kzg10/data_structures.rs index 60626e70..d648f19f 100644 --- a/poly-commit/src/kzg10/data_structures.rs +++ b/poly-commit/src/kzg10/data_structures.rs @@ -420,7 +420,8 @@ impl> Randomness { } } -impl> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/kzg10/mod.rs b/poly-commit/src/kzg10/mod.rs index a6ea5752..508db2cb 100644 --- a/poly-commit/src/kzg10/mod.rs +++ b/poly-commit/src/kzg10/mod.rs @@ -5,10 +5,10 @@ //! proposed by Kate, Zaverucha, and Goldberg ([KZG10](http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf)). //! This construction achieves extractability in the algebraic group model (AGM). 
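The `setup` hunks just below drop KZG10's hand-rolled `FixedBase` window tables in favor of the `ScalarMul::batch_mul` API from `ark-ec`: one fixed base, a slice of scalars, and a single internal batch normalization to affine form. A minimal sketch of the pattern, assuming `ark-ec`/`ark-ff` 0.4 (the helper name `srs_powers` is ours, not part of the patch):

```rust
use ark_ec::scalar_mul::ScalarMul;
use ark_ff::One;

// Illustration only: compute [beta^0]B, [beta^1]B, ..., [beta^d]B in affine
// form, the way the new `setup` code builds `powers_of_g`. `batch_mul`
// multiplies the single base by every scalar and batch-normalizes the
// projective results, which is why the explicit `normalize_batch` calls
// disappear from the diff below.
fn srs_powers<B: ScalarMul>(base: B, beta: B::ScalarField, d: usize) -> Vec<B::MulBase> {
    // powers_of_beta = [1, beta, ..., beta^d]
    let mut powers_of_beta = Vec::with_capacity(d + 1);
    let mut cur = B::ScalarField::one();
    for _ in 0..=d {
        powers_of_beta.push(cur);
        cur *= &beta;
    }
    base.batch_mul(&powers_of_beta)
}
```

The same call shape covers all three uses in the hunk: `g`, `gamma_g` (over the full `powers_of_beta`), and `h` (over the inverse powers of `beta`).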
-use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec}; +use crate::{BTreeMap, Error, LabeledPolynomial, PCCommitmentState, ToString, Vec}; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; @@ -66,36 +66,27 @@ where let gamma_g = E::G1::rand(rng); let h = E::G2::rand(rng); + // powers_of_beta = [1, b, ..., b^(max_degree + 1)], len = max_degree + 2 let mut powers_of_beta = vec![E::ScalarField::one()]; let mut cur = beta; - for _ in 0..max_degree { + for _ in 0..=max_degree { powers_of_beta.push(cur); cur *= &beta; } - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); + let powers_of_g = g.batch_mul(&powers_of_beta[0..max_degree + 1]); end_timer!(g_time); - let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); - let mut powers_of_gamma_g = - FixedBase::msm::(scalar_bits, window_size, &gamma_g_table, &powers_of_beta); - // Add an additional power of gamma_g, because we want to be able to support - // up to D queries. - powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta)); - end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); - let powers_of_gamma_g = E::G1::normalize_batch(&powers_of_gamma_g) + // Use the entire `powers_of_beta`, since we want to be able to support + // up to D queries. + let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); + let powers_of_gamma_g = gamma_g + .batch_mul(&powers_of_beta) .into_iter() .enumerate() .collect(); + end_timer!(gamma_g_time); let neg_powers_of_h_time = start_timer!(|| "Generating negative powers of h in G2"); let neg_powers_of_h = if produce_g2_powers { @@ -106,20 +97,10 @@ where cur /= &beta; } - let neg_h_table = FixedBase::get_window_table(scalar_bits, window_size, h); - let neg_powers_of_h = FixedBase::msm::( scalar_bits, window_size, &neg_h_table, &neg_powers_of_beta, ); - - let affines = E::G2::normalize_batch(&neg_powers_of_h); - let mut affines_map = BTreeMap::new(); - affines.into_iter().enumerate().for_each(|(i, a)| { affines_map.insert(i, a); }); - affines_map + h.batch_mul(&neg_powers_of_beta) .into_iter() .enumerate() .collect() } else { BTreeMap::new() }; diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index 35bed484..4c44c0f3 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -9,7 +9,7 @@ #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] -#![doc = include_str!("../README.md")] +#![doc = include_str!("../../README.md")] #[allow(unused)] #[macro_use] @@ -101,8 +101,6 @@ pub mod sonic_pc; /// [pcdas]: https://eprint.iacr.org/2020/499 pub mod ipa_pc; -/// Defines the challenge strategies and challenge generator.
-pub mod challenge; /// A multilinear polynomial commitment scheme that converts an n-variate multilinear polynomial into /// n quotient UV polynomials. This scheme is based on the hardness of the discrete logarithm /// in prime-order groups. Construction is detailed in [[XZZPD19]][xzzpd19] and [[ZGKPP18]][zgkpp18] @@ -111,7 +109,6 @@ pub mod challenge; /// [zgkpp18]: https://ieeexplore.ieee.org/document/8418645 pub mod multilinear_pc; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// Multivariate polynomial commitment based on the construction in /// [[PST13]][pst] with batching and (optional) hiding property inspired /// by the univariate scheme in Marlin. @@ -165,8 +162,11 @@ pub trait PolynomialCommitment, S: Cryptographic type VerifierKey: PCVerifierKey; /// The commitment to a polynomial. type Commitment: PCCommitment + Default; - /// The commitment randomness. - type Randomness: PCRandomness; + /// Auxiliary state of the commitment, output by the `commit` phase. + /// It contains information that can be reused by the committer + /// during the `open` phase, such as the commitment randomness. + /// Not to be shared with the verifier. + type CommitmentState: PCCommitmentState; /// The evaluation proof for a single point. type Proof: Clone; /// The evaluation proof for a query set. @@ -212,7 +212,7 @@ pub trait PolynomialCommitment, S: Cryptographic ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -225,13 +225,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a; /// check but with individual challenges fn check<'a>( vk: &Self::VerifierKey, commitments: impl IntoIterator>, point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a; @@ -261,13 +261,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // The default implementation proceeds by rearranging the queries in // order to gather all polynomials that should be queried at // the same point, then opening their commitments simultaneously with a // single call to `open` (per point) let rng = &mut crate::optional_rng::OptionalRng(rng); - let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials + let poly_st_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) - .map(|((poly, r), comm)| (poly.label(), (poly, r, comm))) + .map(|((poly, st), comm)| (poly.label(), (poly, st, comm))) .collect(); let open_time = start_timer!(|| format!( "Opening {} polynomials at query set of size {}", - poly_rand_comm.len(), + poly_st_comm.len(), query_set.len(), )); @@ -307,20 +307,20 @@ pub trait PolynomialCommitment, S: Cryptographic let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() {
let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); // Constructing matching vectors with the polynomial, commitment // randomness and actual commitment for each polynomial being // queried at `point` for label in labels { - let (polynomial, rand, comm) = - poly_rand_comm.get(label).ok_or(Error::MissingPolynomial { + let (polynomial, state, comm) = + poly_st_comm.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(state); query_comms.push(comm); } @@ -333,8 +333,8 @@ pub trait PolynomialCommitment, S: Cryptographic query_polys, query_comms, &point, - challenge_generator, - query_rands, + sponge, + query_states, Some(rng), )?; @@ -366,7 +366,7 @@ pub trait PolynomialCommitment, S: Cryptographic query_set: &QuerySet, evaluations: &Evaluations, proof: &Self::BatchProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -424,15 +424,7 @@ pub trait PolynomialCommitment, S: Cryptographic // Verify all proofs referring to the current point simultaneously // with a single call to `check` - result &= Self::check( - vk, - comms, - &point, - values, - &proof, - challenge_generator, - Some(rng), - )?; + result &= Self::check(vk, comms, &point, values, &proof, sponge, Some(rng))?; end_timer!(proof_time); } Ok(result) @@ -446,12 +438,12 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { @@ -472,8 +464,8 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials, commitments, &poly_query_set, - challenge_generator, - rands, + sponge, + states, rng, )?; Ok(BatchLCProof { @@ -491,7 +483,7 @@ pub trait PolynomialCommitment, S: Cryptographic eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -562,7 +554,7 @@ pub trait PolynomialCommitment, S: Cryptographic &poly_query_set, &poly_evals, proof, - challenge_generator, + sponge, rng, )?; if !pc_result { @@ -674,88 +666,83 @@ pub mod tests { PC: PolynomialCommitment, S: CryptographicSponge, { - let challenge_generators = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; - - for challenge_gen in challenge_generators { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - let max_degree = 100; - let pp = PC::setup(max_degree, None, rng)?; - for _ in 0..10 { - let supported_degree = Uniform::from(1..=max_degree).sample(rng); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - - let mut labels = Vec::new(); - let mut polynomials = Vec::new(); - let mut degree_bounds = Vec::new(); - - for i in 0..10 { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree_bound = 1usize; - let hiding_bound = Some(1); - degree_bounds.push(degree_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(supported_degree, None, rng), - Some(degree_bound), - hiding_bound, - )); - } + let sponge = sponge(); + + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let max_degree = 100; + let pp = PC::setup(max_degree, None, rng)?; + for _ in 0..10 { + let supported_degree = Uniform::from(1..=max_degree).sample(rng); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + + let mut labels = Vec::new(); + let mut polynomials = Vec::new(); + let mut degree_bounds = Vec::new(); + + for i in 0..10 { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree_bound = 1usize; + let hiding_bound = Some(1); + degree_bounds.push(degree_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(supported_degree, None, rng), + Some(degree_bound), + hiding_bound, + )); + } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - Some(degree_bounds.as_slice()), - )?; - println!("Trimmed"); + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); + println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + Some(degree_bounds.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - let point = rand_point(None, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + let point = rand_point(None, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } Ok(()) @@ -782,127 +769,123 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make 
the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials: Vec> = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials: Vec> = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; - polynomials.push(LabeledPolynomial::new( - label, - rand_poly(degree, num_vars, rng).into(), - degree_bound, - hiding_bound, - )) - } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng).into(), + degree_bound, + hiding_bound, + )) + } + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); 
+ println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - // Construct query set - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for _ in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } + // Construct query set + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for _ in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); - } + } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + num_polynomials, num_points_in_query_set + ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); } - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + Ok(()) } @@ -927,167 +910,163 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + 
supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + if rng.gen() { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) + } else { + None + } } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - if rng.gen() { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; + println!("Hiding bound: {:?}", hiding_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng), + degree_bound, + hiding_bound, + )) + } + println!("supported degree: {:?}", supported_degree); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + println!("{:?}", degree_bounds); + println!("{}", num_polynomials); + println!("{}", enforce_degree_bounds); + + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_degree, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); + + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + + // Let's construct our equations + let mut linear_combinations = Vec::new(); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for i in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for j in 0..num_equations.unwrap() { + let label = format!("query {} eqn {}", i, j); + let mut lc = LinearCombination::empty(label.clone()); + + let mut value = F::zero(); + let should_have_degree_bounds: bool = rng.gen(); + for (k, label) in labels.iter().enumerate() { + if should_have_degree_bounds { + value += &polynomials[k].evaluate(&point); + lc.push((F::one(), label.to_string().into())); + break; } else { - None - } - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; - println!("Hiding bound: {:?}", hiding_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(degree, num_vars, rng), - degree_bound, - hiding_bound, - )) - } - println!("supported degree: {:?}", supported_degree); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - println!("{:?}", degree_bounds); - println!("{}", num_polynomials); - println!("{}", enforce_degree_bounds); - - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_degree, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); - - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - - // Let's construct our equations - let mut linear_combinations = Vec::new(); - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for i in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for j in 0..num_equations.unwrap() { - let label = format!("query {} eqn {}", i, j); - let mut lc = LinearCombination::empty(label.clone()); - - let mut value = F::zero(); - let should_have_degree_bounds: bool = rng.gen(); - for (k, label) in labels.iter().enumerate() { - if should_have_degree_bounds { - value += &polynomials[k].evaluate(&point); - lc.push((F::one(), label.to_string().into())); - break; + let poly = &polynomials[k]; + if poly.degree_bound().is_some() { + continue; } else { - let poly = &polynomials[k]; - if poly.degree_bound().is_some() { - continue; - } else { - assert!(poly.degree_bound().is_none()); - let coeff = F::rand(rng); - value += &(coeff * poly.evaluate(&point)); - lc.push((coeff, label.to_string().into())); - } + assert!(poly.degree_bound().is_none()); + let coeff = F::rand(rng); + value += &(coeff * poly.evaluate(&point)); + lc.push((coeff, label.to_string().into())); } } - values.insert((label.clone(), point.clone()), value); - if !lc.is_empty() { - linear_combinations.push(lc); - // Insert query - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - } } - } - if linear_combinations.is_empty() { - continue; - } - println!("Generated query set"); - println!("Linear combinations: {:?}", linear_combinations); - - let proof = PC::open_combinations( - &ck, - &linear_combinations, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - println!("Generated proof"); - let result = PC::check_combinations( - &vk, - &linear_combinations, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); + values.insert((label.clone(), point.clone()), value); + if !lc.is_empty() { + linear_combinations.push(lc); + // Insert query + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); } } - assert!( - result, - "proof was incorrect, equations: {:#?}", - linear_combinations + } + if linear_combinations.is_empty() { + continue; + } + println!("Generated query set"); + println!("Linear combinations: {:?}", linear_combinations); + + let proof = PC::open_combinations( + &ck, + &linear_combinations, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + println!("Generated proof"); + let result = PC::check_combinations( + &vk, + &linear_combinations, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + 
num_polynomials, num_points_in_query_set ); println!("Degree of polynomials:",); for poly in polynomials { println!("Degree: {:?}", poly.degree()); } } assert!( result, "proof was incorrect, equations: {:#?}", linear_combinations ); } + Ok(()) } diff --git a/poly-commit/src/linear_codes/brakedown.rs b/poly-commit/src/linear_codes/brakedown.rs index 625efb07..6afc7c8d 100644 --- a/poly-commit/src/linear_codes/brakedown.rs +++ b/poly-commit/src/linear_codes/brakedown.rs @@ -78,12 +78,14 @@ where (self.n, self.m) } - fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters { - &self.leaf_hash_params + fn leaf_hash_param(&self) -> &<::LeafHash as CRHScheme>::Parameters { + &self.leaf_hash_param } - fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { - &self.two_to_one_params + fn two_to_one_hash_param( + &self, + ) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { + &self.two_to_one_hash_param } fn col_hash_params(&self) -> &::Parameters { @@ -102,8 +104,8 @@ where rng: &mut R, poly_len: usize, check_well_formedness: bool, - leaf_hash_params: LeafParam, - two_to_one_params: TwoToOneParam, + leaf_hash_param: LeafParam, + two_to_one_hash_param: TwoToOneParam, col_hash_params: H::Parameters, ) -> Self { let sec_param = 128; @@ -134,8 +136,8 @@ where a_mats, b_mats, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ) } @@ -154,8 +156,8 @@ where a_mats: Vec>, b_mats: Vec>, check_well_formedness: bool, - leaf_hash_params: LeafParam, - two_to_one_params: TwoToOneParam, + leaf_hash_param: LeafParam, + two_to_one_hash_param: TwoToOneParam, col_hash_params: H::Parameters, ) -> Self { let m_ext = if a_dims.is_empty() { @@ -194,8 +196,8 @@ where a_mats, b_mats, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, } } diff --git a/poly-commit/src/linear_codes/data_structures.rs b/poly-commit/src/linear_codes/data_structures.rs index ecebcf8f..0ceab21b 100644 --- a/poly-commit/src/linear_codes/data_structures.rs +++ b/poly-commit/src/linear_codes/data_structures.rs @@ -1,5 +1,5 @@ use super::utils::SprsMat; -use crate::{PCCommitment, PCRandomness}; +use crate::{utils::Matrix, PCCommitment, PCCommitmentState}; use ark_crypto_primitives::{ crh::CRHScheme, merkle_tree::{Config, LeafParam, Path, TwoToOneParam}, }; @@ -51,10 +51,10 @@ pub struct BrakedownPCParams { pub(crate) check_well_formedness: bool, /// Parameters for hash function of Merkle tree leaves #[derivative(Debug = "ignore")] - pub(crate) leaf_hash_params: LeafParam, + pub(crate) leaf_hash_param: LeafParam, /// Parameters for hash function of Merkle tree combining two nodes into one #[derivative(Debug = "ignore")] - pub(crate) two_to_one_params: TwoToOneParam, + pub(crate) two_to_one_hash_param: TwoToOneParam, // Parameters for obtaining leaf digest from leaf value.
#[derivative(Debug = "ignore")] pub(crate) col_hash_params: H::Parameters, @@ -88,9 +88,24 @@ impl PCCommitment for LinCodePCCommitment { } } -pub(crate) type LinCodePCRandomness = (); +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCCommitmentState +where + F: PrimeField, + H: CRHScheme, +{ + pub(crate) mat: Matrix, + pub(crate) ext_mat: Matrix, + pub(crate) leaves: Vec, +} -impl PCRandomness for LinCodePCRandomness { +impl PCCommitmentState for LinCodePCCommitmentState +where + F: PrimeField, + H: CRHScheme, +{ + type Randomness = (); fn empty() -> Self { unimplemented!() } @@ -100,7 +115,7 @@ impl PCRandomness for LinCodePCRandomness { _has_degree_bound: bool, _num_vars: Option, _rng: &mut R, - ) -> Self { + ) -> Self::Randomness { unimplemented!() } } diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 99c61b1e..d0b8f90b 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -1,12 +1,15 @@ -use crate::utils::{inner_product, IOPTranscript, Matrix}; +use crate::utils::{inner_product, Matrix}; use crate::{ - Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey, - PolynomialCommitment, + to_bytes, Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, + PCVerifierKey, PolynomialCommitment, }; use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; use ark_crypto_primitives::merkle_tree::MerkleTree; -use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_crypto_primitives::{ + merkle_tree::Config, + sponge::{Absorb, CryptographicSponge}, +}; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_std::borrow::Borrow; @@ -30,7 +33,7 @@ use data_structures::*; pub use data_structures::LinCodePCProof; -use utils::{calculate_t, get_indices_from_transcript, hash_column}; +use utils::{calculate_t, get_indices_from_sponge}; const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; @@ -55,10 +58,12 @@ where fn compute_dimensions(&self, n: usize) -> (usize, usize); /// Get the hash parameters for obtaining leaf digest from leaf value. - fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters; + fn leaf_hash_param(&self) -> &<::LeafHash as CRHScheme>::Parameters; /// Get the parameters for hashing nodes in the merkle tree. - fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; + fn two_to_one_hash_param( + &self, + ) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; /// Get the parameters for hashing a vector of values, /// representing a column of the coefficient matrix, into a leaf value. 
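A structural note for the `commit`/`open` hunks further down: the column hashes (the Merkle leaves) are now computed once, at commit time, and carried in the new `LinCodePCCommitmentState` alongside `mat` and `ext_mat`, so `open` rebuilds the Merkle tree from the stored digests instead of re-encoding and re-hashing the polynomial. A minimal sketch of that column-hashing step, under the same `Vec<F>: Borrow<H::Input>` bound the trait imposes (the helper name `columns_to_leaves` is ours, not part of the patch):

```rust
use ark_crypto_primitives::{crh::CRHScheme, Error};
use ark_std::borrow::Borrow;

// Illustration only: hash each column of the encoded matrix into one leaf
// digest using the column CRH. These digests are what `commit` stores in
// `LinCodePCCommitmentState` and what both `commit` and `open` feed
// (converted into `C::Leaf`) to `create_merkle_tree`.
fn columns_to_leaves<F, H: CRHScheme>(
    columns: Vec<Vec<F>>,
    col_hash_params: &H::Parameters,
) -> Result<Vec<H::Output>, Error>
where
    Vec<F>: Borrow<H::Input>,
{
    columns
        .into_iter()
        .map(|col| H::evaluate(col_hash_params, col))
        .collect()
}
```

The `cfg_into_iter!` version in `commit` below is the same map, parallelized under the `parallel` feature (with a per-column `unwrap` instead of a collected `Result`).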
@@ -85,8 +90,8 @@ where max_degree: usize, num_vars: Option, rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams; @@ -152,14 +157,14 @@ where impl PolynomialCommitment for LinearCodePCS where L: LinearEncode, - F: PrimeField, + F: PrimeField + Absorb, P: Polynomial, S: CryptographicSponge, C: Config + 'static, Vec: Borrow<::Input>, - H::Output: Into, - C::Leaf: Sized + Clone + Default + Send, - H: CRHScheme, + H::Output: Into + Send, + C::Leaf: Sized + Clone + Default + Send + AsRef, + H: CRHScheme + 'static, { type UniversalParams = L::LinCodePCParams; @@ -169,7 +174,7 @@ where type Commitment = LinCodePCCommitment; - type Randomness = LinCodePCRandomness; + type CommitmentState = LinCodePCCommitmentState; type Proof = LPCPArray; @@ -185,8 +190,8 @@ where num_vars: Option, rng: &mut R, ) -> Result { - let leaf_hash_params = ::setup(rng).unwrap(); - let two_to_one_params = ::setup(rng) + let leaf_hash_param = ::setup(rng).unwrap(); + let two_to_one_hash_param = ::setup(rng) .unwrap() .clone(); let col_hash_params = ::setup(rng).unwrap(); @@ -194,8 +199,8 @@ where max_degree, num_vars, rng, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); let real_max_degree = ::max_degree(&pp); @@ -224,7 +229,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -232,35 +237,43 @@ where P: 'a, { let mut commitments = Vec::new(); + let mut states = Vec::new(); - for labeled_polynomial in polynomials.into_iter() { + for labeled_polynomial in polynomials { let polynomial = labeled_polynomial.polynomial(); // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + let n_rows = mat.n; + let n_cols = mat.m; + let n_ext_cols = ext_mat.m; // 2. Create the Merkle tree from the hashes of each column. - let col_tree = create_merkle_tree::( - &ext_mat, - ck.leaf_hash_params(), - ck.two_to_one_params(), - ck.col_hash_params(), + let ext_mat_cols = ext_mat.cols(); + let leaves: Vec = cfg_into_iter!(ext_mat_cols) + .map(|col| { + H::evaluate(ck.col_hash_params(), col) + .map_err(|_| Error::HashingError) + .unwrap() + }) + .collect(); + let state = Self::CommitmentState { + mat, + ext_mat, + leaves, + }; + let mut leaves: Vec = + state.leaves.clone().into_iter().map(|h| h.into()).collect(); + let col_tree = create_merkle_tree::( + &mut leaves, + ck.leaf_hash_param(), + ck.two_to_one_hash_param(), )?; - // 3. Obtain the MT root and add it to the transcript. + // 3. Obtain the MT root let root = col_tree.root(); - let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); - - transcript - .append_serializable_element(b"root", &root) - .map_err(|_| Error::TranscriptError)?; - - let n_rows = mat.n; - let n_cols = mat.m; - let n_ext_cols = ext_mat.m; - // 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata. 
let commitment = LinCodePCCommitment { metadata: Metadata { @@ -276,92 +289,67 @@ where commitment, None, )); + states.push(state); } - let com_len = &commitments.len(); - Ok((commitments, vec![(); *com_len])) + Ok((commitments, states)) } fn open<'a>( ck: &Self::CommitterKey, - labeled_polynomials: impl IntoIterator>, + _labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, - _rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut proof_array = LPCPArray::default(); - let labeled_commitments: Vec<&'a LabeledCommitment> = - commitments.into_iter().collect(); - let labeled_polynomials: Vec<&'a LabeledPolynomial> = - labeled_polynomials.into_iter().collect(); - - if labeled_commitments.len() != labeled_polynomials.len() { - return Err(Error::IncorrectInputLength(format!( - "Mismatched lengths: {} commitments, {} polynomials", - labeled_commitments.len(), - labeled_polynomials.len() - ))); - } - for i in 0..labeled_polynomials.len() { - let polynomial = labeled_polynomials[i].polynomial(); - let commitment = labeled_commitments[i].commitment(); + for (labeled_commitment, state) in commitments.into_iter().zip(states) { + let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; - let root = &commitment.root; // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. - let (mat, ext_mat) = L::compute_matrices(polynomial, ck); - // 2. Create the Merkle tree from the hashes of each column. - let col_tree = create_merkle_tree::( - &ext_mat, - ck.leaf_hash_params(), - ck.two_to_one_params(), - ck.col_hash_params(), + let Self::CommitmentState { + mat, + ext_mat, + leaves: col_hashes, + } = state; + let mut col_hashes: Vec = + col_hashes.clone().into_iter().map(|h| h.into()).collect(); + + let col_tree = create_merkle_tree::( + &mut col_hashes, + ck.leaf_hash_param(), + ck.two_to_one_hash_param(), )?; // 3. Generate vector `b` to left-multiply the matrix. let (_, b) = L::tensor(point, n_cols, n_rows); - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", root) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); // If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and append it to the transcript. let well_formedness = if ck.check_well_formedness() { - let mut r = Vec::new(); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let r = sponge.squeeze_field_elements::(n_rows); let v = mat.row_mul(&r); - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); Some(v) } else { None }; let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } + sponge.absorb(&point_vec); proof_array.push(LinCodePCProof { // Compute the opening proof and append b.M to the transcript. 
@@ -372,7 +360,7 @@ where &mat, &ext_mat, &col_tree, - &mut transcript, + sponge, )?, well_formedness, }); @@ -387,31 +375,19 @@ where point: &'a P::Point, values: impl IntoIterator, proof_array: &Self::Proof, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, { - let labeled_commitments: Vec<&'a LabeledCommitment> = - commitments.into_iter().collect(); - let values: Vec = values.into_iter().collect(); - - if labeled_commitments.len() != proof_array.len() - || labeled_commitments.len() != values.len() - { - return Err(Error::IncorrectInputLength( - format!( - "Mismatched lengths: {} proofs were provided for {} commitments with {} claimed values",labeled_commitments.len(), proof_array.len(), values.len() - ) - )); - } - let leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters = - vk.leaf_hash_params(); - let two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = - vk.two_to_one_params(); + let leaf_hash_param: &<::LeafHash as CRHScheme>::Parameters = + vk.leaf_hash_param(); + let two_to_one_hash_param: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = + vk.two_to_one_hash_param(); - for (i, labeled_commitment) in labeled_commitments.iter().enumerate() { + for (i, (labeled_commitment, value)) in commitments.into_iter().zip(values).enumerate() { + let proof = &proof_array[i]; let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; @@ -419,31 +395,19 @@ where let root = &commitment.root; let t = calculate_t::(vk.sec_param(), vk.distance(), n_ext_cols)?; - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", &commitment.root) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); let out = if vk.check_well_formedness() { - if proof_array[i].well_formedness.is_none() { + if proof.well_formedness.is_none() { return Err(Error::InvalidCommitment); } - let tmp = &proof_array[i].well_formedness.as_ref(); - let well_formedness = tmp.unwrap(); - let mut r = Vec::with_capacity(n_rows); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let tmp = &proof.well_formedness.as_ref(); + let v = tmp.unwrap(); + let r = sponge.squeeze_field_elements::(n_rows); // Upon sending `v` to the Verifier, add it to the sponge. The claim is that v = r.M. - transcript - .append_serializable_element(b"v", well_formedness) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); - (Some(well_formedness), Some(r)) + (Some(v), Some(r)) } else { (None, None) }; @@ -451,36 +415,35 @@ where // 1. Seed the transcript with the point and the received vector // TODO Consider removing the evaluation point from the transcript. let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } - transcript - .append_serializable_element(b"v", &proof_array[i].opening.v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&point_vec); + sponge.absorb(&proof.opening.v); // 2. Ask random oracle for the `t` indices where the checks happen. - let indices = get_indices_from_transcript::(n_ext_cols, t, &mut transcript)?; + let indices = get_indices_from_sponge(n_ext_cols, t, sponge)?; // 3.
Hash the received columns into leaf hashes. - let col_hashes: Vec = proof_array[i] + let col_hashes: Vec = proof .opening .columns .iter() - .map(|c| hash_column::(c.clone(), vk.col_hash_params()).unwrap()) + .map(|c| { + H::evaluate(vk.col_hash_params(), c.clone()) + .map_err(|_| Error::HashingError) + .unwrap() + .into() + }) .collect(); // 4. Verify the paths for each of the leaf hashes - this is only run once, // even if we have a well-formedness check (i.e., we save sending and checking the columns). // See "Concrete optimizations to the commitment scheme", p.12 of [Brakedown](https://eprint.iacr.org/2021/1043.pdf). for (j, (leaf, q_j)) in col_hashes.iter().zip(indices.iter()).enumerate() { - let path = &proof_array[i].opening.paths[j]; + let path = &proof.opening.paths[j]; if path.leaf_index != *q_j { return Err(Error::InvalidCommitment); } - path.verify(leaf_hash_params, two_to_one_params, root, leaf.clone()) + path.verify(leaf_hash_param, two_to_one_hash_param, root, leaf.clone()) .map_err(|_| Error::InvalidCommitment)?; } @@ -494,7 +457,7 @@ where }; // 5. Compute the encoding w = E(v). - let w = L::encode(&proof_array[i].opening.v, vk)?; + let w = L::encode(&proof.opening.v, vk)?; // 6. Compute `a`, `b` to right- and left- multiply with the matrix `M`. let (a, b) = L::tensor(point, n_cols, n_rows); @@ -507,12 +470,12 @@ where for (transcript_index, matrix_index) in indices.iter().enumerate() { check_inner_product( &r, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w_well_formedness[*matrix_index], )?; check_inner_product( &b, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w[*matrix_index], )?; } @@ -520,13 +483,13 @@ where for (transcript_index, matrix_index) in indices.iter().enumerate() { check_inner_product( &b, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w[*matrix_index], )?; } } - if inner_product(&proof_array[i].opening.v, &a) != values[i] { + if inner_product(&proof.opening.v, &a) != value { eprintln!("Function check: claimed value in position {i} does not match the evaluation of the committed polynomial in the same position"); return Ok(false); } @@ -537,58 +500,45 @@ where } // TODO maybe this can go to utils -fn create_merkle_tree( - ext_mat: &Matrix, - leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters, - two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, - col_hash_params: &H::Parameters, +fn create_merkle_tree( + leaves: &mut Vec, + leaf_hash_param: &<::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, ) -> Result, Error> where - F: PrimeField, C: Config, - H: CRHScheme, - Vec: Borrow<::Input>, - H::Output: Into, - C::Leaf: Default + Clone + Send, + C::Leaf: Default + Clone + Send + AsRef, { - let ext_mat_cols = ext_mat.cols(); - - let mut col_hashes: Vec = cfg_into_iter!(ext_mat_cols) - .map(|col| hash_column::(col, &col_hash_params).unwrap()) - .collect(); - // pad the column hashes with zeroes - let next_pow_of_two = col_hashes.len().next_power_of_two(); - col_hashes.resize(next_pow_of_two, ::default()); + let next_pow_of_two = leaves.len().next_power_of_two(); + leaves.resize(next_pow_of_two, ::default()); - MerkleTree::::new(leaf_hash_params, two_to_one_params, col_hashes) + MerkleTree::::new(leaf_hash_param, two_to_one_hash_param, leaves) .map_err(|_| Error::HashingError) } -fn generate_proof( +fn generate_proof( sec_param: 
usize, distance: (usize, usize), b: &[F], mat: &Matrix, ext_mat: &Matrix, col_tree: &MerkleTree, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> where - F: PrimeField, + F: PrimeField + Absorb, C: Config, + S: CryptographicSponge, { let t = calculate_t::(sec_param, distance, ext_mat.m)?; // 1. left-multiply the matrix by `b`. let v = mat.row_mul(b); - - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); // 2. Generate t column indices to test the linear combination on. - let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?; + let indices = get_indices_from_sponge(ext_mat.m, t, sponge)?; // 3. Compute Merkle tree paths for the requested columns. let mut queried_columns = Vec::with_capacity(t); diff --git a/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs b/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs index 27cfc2a6..bcfd9f0a 100644 --- a/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs +++ b/poly-commit/src/linear_codes/multilinear_brakedown/mod.rs @@ -44,16 +44,16 @@ where _max_degree: usize, num_vars: Option, rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams { Self::LinCodePCParams::default( rng, 1 << num_vars.unwrap(), true, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ) } diff --git a/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs b/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs index 1a820650..e4be256f 100644 --- a/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs +++ b/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs @@ -4,7 +4,6 @@ mod tests { use crate::linear_codes::LinearCodePCS; use crate::utils::test_sponge; use crate::{ - challenge::ChallengeGenerator, linear_codes::{utils::*, BrakedownPCParams, MultilinearBrakedown, PolynomialCommitment}, LabeledPolynomial, }; @@ -86,8 +85,8 @@ mod tests { let mut rng = &mut test_rng(); let num_vars = 11; // just to make sure we have the right degree given the FFT domain for our field - let leaf_hash_params = ::setup(&mut rng).unwrap(); - let two_to_one_params = ::setup(&mut rng) + let leaf_hash_param = ::setup(&mut rng).unwrap(); + let two_to_one_hash_param = ::setup(&mut rng) .unwrap() .clone(); let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); @@ -98,8 +97,8 @@ mod tests { rng, 1 << num_vars, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); @@ -114,22 +113,19 @@ mod tests { ); let mut test_sponge = test_sponge::(); - let (c, rands) = BrakedownPCS::::commit(&ck, &[labeled_poly.clone()], None).unwrap(); + let (c, states) = BrakedownPCS::::commit(&ck, &[labeled_poly.clone()], None).unwrap(); let point = rand_point(Some(num_vars), rand_chacha); let value = labeled_poly.evaluate(&point); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); - let proof = BrakedownPCS::::open( &ck, &[labeled_poly], &c, &point, - &mut (challenge_generator.clone()), - &rands, + &mut (test_sponge.clone()), + &states, None, ) .unwrap(); @@ -139,7 +135,7 @@ mod tests { &point, [value], &proof, - &mut 
challenge_generator, + &mut test_sponge, None ) .unwrap()); diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 941c85e4..2b1d8e73 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -1,11 +1,6 @@ -use core::borrow::Borrow; - -use crate::utils::IOPTranscript; use crate::{utils::ceil_div, Error}; - -use ark_crypto_primitives::{crh::CRHScheme, merkle_tree::Config}; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{Field, PrimeField}; - use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::string::ToString; use ark_std::vec::Vec; @@ -16,7 +11,8 @@ use num_traits::Float; #[cfg(test)] use { crate::to_bytes, - ark_std::{marker::PhantomData, rand::RngCore}, + ark_crypto_primitives::crh::CRHScheme, + ark_std::{borrow::Borrow, marker::PhantomData, rand::RngCore}, digest::Digest, }; @@ -113,35 +109,18 @@ pub(crate) fn get_num_bytes(n: usize) -> usize { ceil_div((usize::BITS - n.leading_zeros()) as usize, 8) } -#[inline] -pub(crate) fn hash_column(array: Vec, params: &H::Parameters) -> Result -where - F: PrimeField, - C: Config, - H: CRHScheme, - Vec: Borrow<::Input>, - C::Leaf: Sized, - H::Output: Into, -{ - H::evaluate(params, array) - .map_err(|_| Error::HashingError) - .map(|x| x.into()) -} - /// Generate `t` (not necessarily distinct) random points in `[0, n)` /// using the current state of the `transcript`. -pub(crate) fn get_indices_from_transcript( +pub(crate) fn get_indices_from_sponge( n: usize, t: usize, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> { let bytes_to_squeeze = get_num_bytes(n); let mut indices = Vec::with_capacity(t); for _ in 0..t { - let mut bytes: Vec = vec![0; bytes_to_squeeze]; - transcript - .get_and_append_byte_challenge(b"i", &mut bytes) - .map_err(|_| Error::TranscriptError)?; + let bytes = sponge.squeeze_bytes(bytes_to_squeeze); + sponge.absorb(&bytes); // get the usize from Vec: let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); diff --git a/poly-commit/src/marlin/marlin_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pc/data_structures.rs index 2b09e03a..203e3201 100644 --- a/poly-commit/src/marlin/marlin_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{ - DenseUVPolynomial, PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, - PCRandomness, PCVerifierKey, Vec, + DenseUVPolynomial, PCCommitment, PCCommitmentState, PCCommitterKey, PCPreparedCommitment, + PCPreparedVerifierKey, PCVerifierKey, Vec, }; use ark_ec::pairing::Pairing; use ark_ec::AdditiveGroup; @@ -360,7 +360,8 @@ impl<'a, F: PrimeField, P: DenseUVPolynomial> AddAssign<(F, &'a Randomness> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: kzg10::Randomness::empty(), diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 39c4e362..7fbfba07 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, marlin::Marlin, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, ToString, Vec}; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, 
PolynomialCommitment}; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Div, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -66,7 +65,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -180,7 +179,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -191,7 +190,7 @@ where let commit_time = start_timer!(|| "Committing to polynomials"); let mut commitments = Vec::new(); - let mut randomness = Vec::new(); + let mut states = Vec::new(); for p in polynomials { let label = p.label(); @@ -232,17 +231,17 @@ where }; let comm = Commitment { comm, shifted_comm }; - let rand = Randomness { rand, shifted_rand }; + let state = Randomness { rand, shifted_rand }; commitments.push(LabeledCommitment::new( label.to_string(), comm, degree_bound, )); - randomness.push(rand); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((commitments, randomness)) + Ok((commitments, states)) } /// On input a polynomial `p` and a point `point`, outputs a proof for the same. @@ -251,13 +250,13 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut p = P::zero(); @@ -267,7 +266,7 @@ where let mut shifted_r_witness = P::zero(); let mut enforce_degree_bound = false; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, rand) in labeled_polynomials.into_iter().zip(states) { let degree_bound = polynomial.degree_bound(); assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -283,7 +282,7 @@ where )?; // compute next challenges challenge^j and challenge^{j+1}. 
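// --- Illustrative sketch (not part of the patch): the pattern that replaces
// `ChallengeGenerator` in the hunks below is to squeeze each opening challenge
// straight from the shared sponge; prover and verifier must perform identical
// absorb/squeeze sequences. `next_challenge` is a hypothetical helper, and the
// 128-bit truncated size stands in for this crate's `CHALLENGE_SIZE` constant.
use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
use ark_ff::PrimeField;

fn next_challenge<F: PrimeField, S: CryptographicSponge>(sponge: &mut S) -> F {
    // Squeeze one short (truncated) field element as the challenge.
    sponge.squeeze_field_elements_with_sizes::<F>(&[FieldElementSize::Truncated(128)])[0]
}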
- let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -299,7 +298,7 @@ where *point, &shifted_rand, )?; - let challenge_j_1 = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j_1 = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_witness = shift_polynomial(ck, &witness, degree_bound); @@ -347,7 +346,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -358,7 +357,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, Some(vk), )?; let combined_comm = kzg10::Commitment(combined_comm.into()); @@ -373,7 +372,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -384,7 +383,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, Some(vk), )?; assert_eq!(proof.len(), combined_queries.len()); @@ -407,13 +406,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -422,8 +421,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -437,7 +436,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -450,7 +449,7 @@ where query_set, evaluations, proof, - opening_challenges, + sponge, rng, ) } @@ -462,19 +461,19 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result>, Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let rng = &mut crate::optional_rng::OptionalRng(rng); let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) .map(|((poly, r), comm)| (poly.label(), (poly, r, comm))) .collect(); @@ -497,7 +496,7 @@ where let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); for label in labels { @@ -507,7 +506,7 @@ where })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(rand); query_comms.push(comm); } @@ -517,8 +516,8 @@ where query_polys, query_comms, point, - opening_challenges, - query_rands, + sponge, + query_states, Some(rng), )?; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs 
b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs index 8ccf300b..9cc8d73b 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{BTreeMap, Vec}; use crate::{ - PCCommitterKey, PCPreparedVerifierKey, PCRandomness, PCUniversalParams, PCVerifierKey, + PCCommitmentState, PCCommitterKey, PCPreparedVerifierKey, PCUniversalParams, PCVerifierKey, }; use ark_ec::pairing::Pairing; use ark_poly::DenseMVPolynomial; @@ -362,12 +362,13 @@ where } } -impl PCRandomness for Randomness +impl PCCommitmentState for Randomness where E: Pairing, P: DenseMVPolynomial, P::Point: Index, { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index ac47c2a7..eee026d7 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -5,10 +5,14 @@ use crate::{ }; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use crate::{ToString, Vec}; use ark_ec::AffineRepr; -use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM}; +use ark_ec::{ + pairing::Pairing, + scalar_mul::{BatchMulPreprocessing, ScalarMul}, + CurveGroup, VariableBaseMSM, +}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; @@ -20,7 +24,6 @@ pub use data_structures::*; mod combinations; use combinations::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -151,7 +154,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = marlin_pc::Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -211,47 +214,33 @@ where }) .unzip(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let mut powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); - powers_of_g.push(g); + let mut powers_of_g = g.batch_mul(&powers_of_beta); + powers_of_g.push(g.into_affine()); powers_of_beta_terms.push(P::Term::new(vec![])); end_timer!(g_time); let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 2); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); // Each element `i` of `powers_of_gamma_g` is a vector of length `max_degree+1` // containing `betas[i]^j \gamma G` for `j` from 1 to `max_degree+1` to support // up to `max_degree` queries let mut powers_of_gamma_g = vec![Vec::new(); num_vars]; + let gamma_g_table = BatchMulPreprocessing::new(gamma_g, max_degree + 1); + ark_std::cfg_iter_mut!(powers_of_gamma_g) .enumerate() .for_each(|(i, v)| { - let mut powers_of_beta = Vec::with_capacity(max_degree); + let mut powers_of_beta = Vec::with_capacity(max_degree + 1); 
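// --- Illustrative sketch (not part of the patch): the `FixedBase` window-table
// boilerplate deleted above is subsumed by `ark-ec`'s `ScalarMul` and
// `BatchMulPreprocessing`, which choose window sizes and batch-normalize to
// affine internally. Function names here are assumptions for illustration.
use ark_ec::{
    pairing::Pairing,
    scalar_mul::{BatchMulPreprocessing, ScalarMul},
};

// One-shot form: multiply a single base by many scalars, returning affine points.
fn powers_in_g1<E: Pairing>(g: E::G1, scalars: &[E::ScalarField]) -> Vec<E::G1Affine> {
    g.batch_mul(scalars)
}

// Reusable form: amortize one preprocessing table over several scalar batches,
// as the `powers_of_gamma_g` loop reuses `gamma_g_table`.
fn powers_with_table<E: Pairing>(
    g: E::G1,
    batches: &[Vec<E::ScalarField>],
    max_len: usize,
) -> Vec<Vec<E::G1Affine>> {
    let table = BatchMulPreprocessing::new(g, max_len);
    batches.iter().map(|s| table.batch_mul(s)).collect()
}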
let mut cur = E::ScalarField::one(); for _ in 0..=max_degree { cur *= &betas[i]; powers_of_beta.push(cur); } - *v = FixedBase::msm::( - scalar_bits, - window_size, - &gamma_g_table, - &powers_of_beta, - ); + *v = gamma_g_table.batch_mul(&powers_of_beta); }); end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); let gamma_g = gamma_g.into_affine(); - let powers_of_gamma_g = powers_of_gamma_g - .into_iter() - .map(|v| E::G1::normalize_batch(&v)) - .collect(); let beta_h: Vec<_> = betas.iter().map(|b| h.mul(b).into_affine()).collect(); let h = h.into_affine(); let prepared_h = h.into(); @@ -343,7 +332,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -440,26 +429,26 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // Compute random linear combinations of committed polynomials and randomness let mut p = P::zero(); let mut r = Randomness::empty(); - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { Self::check_degrees_and_bounds(ck.supported_degree, &polynomial)?; // compute challenge^j and challenge^{j+1}. - let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; p += (challenge_j, polynomial.polynomial()); - r += (challenge_j, rand); + r += (challenge_j, state); } let open_time = start_timer!(|| format!("Opening polynomial of degree {}", p.degree())); @@ -538,7 +527,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -550,7 +539,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, None, )?; // Compute both sides of the pairing equation @@ -582,7 +571,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -593,7 +582,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, None, )?; let check_time = @@ -660,13 +649,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -675,8 +664,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -690,7 +679,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -703,7 +692,7 @@ where eqn_query_set, eqn_evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index 4bd4fe27..d7e7f5a1 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ 
-1,9 +1,9 @@ -use crate::{challenge::ChallengeGenerator, CHALLENGE_SIZE}; +use crate::CHALLENGE_SIZE; use crate::{kzg10, Error}; use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; use crate::{Evaluations, LabeledCommitment, QuerySet}; -use crate::{PCRandomness, Polynomial, PolynomialCommitment}; +use crate::{PCCommitmentState, Polynomial, PolynomialCommitment}; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; @@ -110,7 +110,7 @@ where fn accumulate_commitments_and_values<'a>( commitments: impl IntoIterator>>, values: impl IntoIterator, - challenge_gen: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(E::G1, E::ScalarField), Error> { let acc_time = start_timer!(|| "Accumulating commitments and values"); @@ -121,13 +121,14 @@ where let commitment = labeled_commitment.commitment(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); - let challenge_i = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; combined_comm += &commitment.comm.0.mul(challenge_i); combined_value += &(value * &challenge_i); if let Some(degree_bound) = degree_bound { - let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i_1: E::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group(); @@ -152,7 +153,7 @@ where commitments: impl IntoIterator>>, query_set: &QuerySet, evaluations: &Evaluations, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(Vec>, Vec, Vec), Error> where @@ -199,7 +200,7 @@ where let (c, v) = Self::accumulate_commitments_and_values( comms_to_combine, values_to_combine, - opening_challenges, + sponge, vk, )?; end_timer!(lc_time); @@ -227,8 +228,8 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Error> where @@ -241,18 +242,18 @@ where Commitment = marlin_pc::Commitment, Error = Error, >, - PC::Randomness: 'a + AddAssign<(E::ScalarField, &'a PC::Randomness)>, + PC::CommitmentState: 'a + AddAssign<(E::ScalarField, &'a PC::CommitmentState)>, PC::Commitment: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) .map(|((p, r), c)| (p.label(), (p, r, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states: Vec = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -262,13 +263,13 @@ where let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = PC::Randomness::empty(); + let mut randomness = PC::CommitmentState::empty(); let mut coeffs_and_comms = Vec::new(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, cur_comm) = + let &(cur_poly, cur_state, cur_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -284,14 +285,14 @@ where // Some(_) > None, always. 
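// --- Illustrative sketch (not part of the patch): conceptually,
// `accumulate_commitments_and_values` folds many (commitment, claimed value)
// pairs into a single pair under sponge-squeezed challenges, so one check
// covers the whole batch. `accumulate` is a hypothetical, simplified helper
// that ignores degree bounds and shifted commitments.
use ark_ec::{pairing::Pairing, AffineRepr};
use ark_ff::Zero;

fn accumulate<E: Pairing>(
    comms_and_values: &[(E::G1Affine, E::ScalarField)],
    challenges: &[E::ScalarField], // one squeezed challenge per pair
) -> (E::G1, E::ScalarField) {
    let mut combined_comm = E::G1::zero();
    let mut combined_value = E::ScalarField::zero();
    for ((comm, value), c) in comms_and_values.iter().zip(challenges) {
        combined_comm += comm.mul(c); // `AffineRepr::mul`
        combined_value += *value * c;
    }
    (combined_comm, combined_value)
}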
hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + randomness += (*coeff, cur_state); coeffs_and_comms.push((*coeff, cur_comm.commitment())); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(randomness); lc_commitments.push(Self::combine_commitments(coeffs_and_comms)); lc_info.push((lc_label, degree_bound)); } @@ -308,8 +309,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; @@ -323,7 +324,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -404,7 +405,7 @@ where &query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index eff86ab9..0973e822 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -1,9 +1,10 @@ use crate::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; +use ark_ec::scalar_mul::BatchMulPreprocessing; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{Field, PrimeField}; use ark_ff::{One, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -27,14 +28,11 @@ impl MultilinearPC { /// setup pub fn setup(num_vars: usize, rng: &mut R) -> UniversalParams { assert!(num_vars > 0, "constant polynomial not supported"); - let g: E::G1 = E::G1::rand(rng); - let h: E::G2 = E::G2::rand(rng); - let g = g.into_affine(); - let h = h.into_affine(); + let g = E::G1::rand(rng); + let h = E::G2::rand(rng); let mut powers_of_g = Vec::new(); let mut powers_of_h = Vec::new(); let t: Vec<_> = (0..num_vars).map(|_| E::ScalarField::rand(rng)).collect(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let mut eq: LinkedList> = LinkedList::from_iter(eq_extension(&t).into_iter()); @@ -54,29 +52,15 @@ impl MultilinearPC { } let mut pp_powers = Vec::new(); - let mut total_scalars = 0; for i in 0..num_vars { let eq = eq_arr.pop_front().unwrap(); let pp_k_powers = (0..(1 << (num_vars - i))).map(|x| eq[x]); pp_powers.extend(pp_k_powers); - total_scalars += 1 << (num_vars - i); } - let window_size = FixedBase::get_mul_window_size(total_scalars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_group()); - - let pp_g = E::G1::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &g_table, - &pp_powers, - )); - let pp_h = E::G2::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &h_table, - &pp_powers, - )); + + let g_table = BatchMulPreprocessing::new(g, num_vars); + let pp_g = g_table.batch_mul(&pp_powers); + let pp_h = h.batch_mul(&pp_powers); let mut start = 0; for i in 0..num_vars { let size = 1 << (num_vars - i); @@ -89,18 +73,14 @@ impl MultilinearPC { // uncomment to measure the time for calculating vp // let vp_generation_timer = start_timer!(|| "VP generation"); - let g_mask = { - let window_size = 
FixedBase::get_mul_window_size(num_vars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - E::G1::normalize_batch(&FixedBase::msm(scalar_bits, window_size, &g_table, &t)) - }; + let g_mask = g_table.batch_mul(&t); // end_timer!(vp_generation_timer); UniversalParams { num_vars, - g, + g: g.into_affine(), g_mask, - h, + h: h.into_affine(), powers_of_g, powers_of_h, } @@ -199,11 +179,7 @@ impl MultilinearPC { ) -> bool { let left = E::pairing(commitment.g_product.into_group() - &vk.g.mul(value), vk.h); - let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize; - let window_size = FixedBase::get_mul_window_size(vk.nv); - - let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_group()); - let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); + let g_mul = vk.g.into_group().batch_mul(point); let pairing_lefts: Vec<_> = (0..vk.nv) .map(|i| vk.g_mask_random[i].into_group() - &g_mul[i]) diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs index b989b323..caf9b79c 100644 --- a/poly-commit/src/sonic_pc/mod.rs +++ b/poly-commit/src/sonic_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, String, ToString, Vec}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -47,12 +46,12 @@ where point: P::Point, values: impl IntoIterator, proof: &kzg10::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, randomizer: Option, ) { let acc_time = start_timer!(|| "Accumulating elements"); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; // Keeps track of running combination of values let mut combined_values = E::ScalarField::zero(); @@ -73,7 +72,7 @@ where // Accumulate values in the BTreeMap *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge; - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } // Push expected results into list of elems. 
Power will be the negative of the expected power @@ -146,7 +145,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -281,7 +280,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -291,7 +290,7 @@ where let rng = &mut crate::optional_rng::OptionalRng(rng); let commit_time = start_timer!(|| "Committing to polynomials"); let mut labeled_comms: Vec> = Vec::new(); - let mut randomness: Vec = Vec::new(); + let mut randomness: Vec = Vec::new(); for labeled_polynomial in polynomials { let enforced_degree_bounds: Option<&[usize]> = ck @@ -345,21 +344,21 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let mut combined_polynomial = P::zero(); let mut combined_rand = kzg10::Randomness::empty(); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { let enforced_degree_bounds: Option<&[usize]> = ck .enforced_degree_bounds .as_ref() @@ -373,8 +372,8 @@ where )?; combined_polynomial += (curr_challenge, polynomial.polynomial()); - combined_rand += (curr_challenge, rand); - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + combined_rand += (curr_challenge, state); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let proof_time = start_timer!(|| "Creating proof for polynomials"); @@ -390,7 +389,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -410,7 +409,7 @@ where *point, values, proof, - opening_challenges, + sponge, None, ); @@ -430,7 +429,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -481,7 +480,7 @@ where *point, values_to_combine.into_iter(), p, - opening_challenges, + sponge, Some(randomizer), ); @@ -502,24 +501,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -528,13 +527,13 @@ where let mut poly = P::zero(); let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = 
Self::Randomness::empty(); + let mut state = Self::CommitmentState::empty(); let mut comm = E::G1::zero(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, curr_comm) = + let &(cur_poly, cur_state, curr_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -553,14 +552,14 @@ where // Some(_) > None, always. hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + state += (*coeff, cur_state); comm += &curr_comm.commitment().0.mul(*coeff); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(state); lc_commitments.push(comm); lc_info.push((lc_label, degree_bound)); } @@ -581,8 +580,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -597,7 +596,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -666,7 +665,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/streaming_kzg/data_structures.rs b/poly-commit/src/streaming_kzg/data_structures.rs index 7adaf005..c8b19c83 100644 --- a/poly-commit/src/streaming_kzg/data_structures.rs +++ b/poly-commit/src/streaming_kzg/data_structures.rs @@ -141,7 +141,7 @@ where /// Stream implementation of folded polynomial. #[derive(Clone, Copy)] -pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>, usize); +pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>); /// Iterator implementation of folded polynomial. pub struct FoldedPolynomialStreamIter<'a, F, I> { challenges: &'a [F], @@ -158,8 +158,7 @@ where /// Initialize a new folded polynomial stream. pub fn new(coefficients: &'a S, challenges: &'a [F]) -> Self { let tree = FoldedPolynomialTree::new(coefficients, challenges); - let len = challenges.len(); - Self(tree, len) + Self(tree) } } @@ -241,7 +240,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(), two]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 2); + let fold_stream = FoldedPolynomialStream(foldstream); assert_eq!(fold_stream.len(), 1); assert_eq!( fold_stream.iter().next(), @@ -253,7 +252,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(); 4]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 4).iter(); + let fold_stream = FoldedPolynomialStream(foldstream).iter(); assert_eq!(fold_stream.last(), Some(coefficients.iter().sum())); } diff --git a/poly-commit/src/streaming_kzg/time.rs b/poly-commit/src/streaming_kzg/time.rs index 8c7fa2f8..b8d52093 100644 --- a/poly-commit/src/streaming_kzg/time.rs +++ b/poly-commit/src/streaming_kzg/time.rs @@ -1,9 +1,9 @@ //! An implementation of a time-efficient version of Kate et al's polynomial commitment, //!
with optimization from [\[BDFG20\]](https://eprint.iacr.org/2020/081.pdf). use ark_ec::pairing::Pairing; -use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::scalar_mul::ScalarMul; use ark_ec::CurveGroup; -use ark_ff::{PrimeField, Zero}; +use ark_ff::Zero; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, vec::Vec, UniformRand}; @@ -50,11 +50,7 @@ impl CommitterKey { let powers_of_tau = powers(tau, max_degree + 1); let g = E::G1::rand(rng); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g_proj = FixedBase::msm(scalar_bits, window_size, &g_table, &powers_of_tau); - let powers_of_g = E::G1::normalize_batch(&powers_of_g_proj); + let powers_of_g = g.batch_mul(&powers_of_tau); let g2 = E::G2::rand(rng).into_affine(); let powers_of_g2 = powers_of_tau diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index 34e41197..3b2a336f 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -1,5 +1,3 @@ -use core::marker::PhantomData; - #[cfg(not(feature = "std"))] use num_traits::Float; @@ -9,12 +7,9 @@ use rayon::{ prelude::IndexedParallelIterator, }; -use ark_ff::{Field, PrimeField}; -use ark_serialize::CanonicalSerialize; +use ark_ff::Field; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::vec::Vec; -use merlin::Transcript; - -use crate::Error; /// Takes as input a struct, and converts them to a series of bytes. All traits /// that implement `CanonicalSerialize` can be automatically converted to bytes @@ -50,7 +45,8 @@ pub(crate) fn ceil_div(x: usize, y: usize) -> usize { (x + y - 1) / y } -#[derive(Debug)] +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub struct Matrix { pub(crate) n: usize, pub(crate) m: usize, @@ -159,86 +155,6 @@ pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { .sum() } -/// The following struct is taken from jellyfish repository. Once they change -/// their dependency on `crypto-primitive`, we use their crate instead of -/// a copy-paste. We needed the newer `crypto-primitive` for serializing. -#[derive(Clone)] -pub(crate) struct IOPTranscript { - transcript: Transcript, - is_empty: bool, - #[doc(hidden)] - phantom: PhantomData, -} - -// TODO: merge this with jf_plonk::transcript -impl IOPTranscript { - /// Create a new IOP transcript. - pub(crate) fn new(label: &'static [u8]) -> Self { - Self { - transcript: Transcript::new(label), - is_empty: true, - phantom: PhantomData, - } - } - - /// Append the message to the transcript. - pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { - self.transcript.append_message(label, msg); - self.is_empty = false; - Ok(()) - } - - /// Append the message to the transcript. - pub(crate) fn append_serializable_element( - &mut self, - label: &'static [u8], - group_elem: &S, - ) -> Result<(), Error> { - self.append_message( - label, - &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, - ) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// The output field element is statistical uniform as long - /// as the field has a size less than 2^384. 
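// --- Illustrative sketch (not part of the patch): the two challenge methods
// being deleted here correspond to plain sponge operations, which is what the
// new `get_indices_from_sponge` above relies on. Helper names are hypothetical.
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ff::PrimeField;

// Rough stand-in for `get_and_append_challenge`: squeeze a field element.
// Note the new code squeezes field challenges without re-absorbing them.
fn field_challenge<F: PrimeField, S: CryptographicSponge>(sponge: &mut S) -> F {
    sponge.squeeze_field_elements::<F>(1)[0]
}

// Rough stand-in for `get_and_append_byte_challenge`: squeeze bytes, then
// re-absorb them, matching the old append-after-challenge behaviour.
fn byte_challenge<S: CryptographicSponge>(sponge: &mut S, n: usize) -> Vec<u8> {
    let bytes = sponge.squeeze_bytes(n);
    sponge.absorb(&bytes);
    bytes
}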
- pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - let mut buf = [0u8; 64]; - self.transcript.challenge_bytes(label, &mut buf); - let challenge = F::from_le_bytes_mod_order(&buf); - self.append_serializable_element(label, &challenge)?; - Ok(challenge) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// Without exposing the internal field `transcript`, - /// this is a wrapper around getting bytes as opposed to field elements. - pub(crate) fn get_and_append_byte_challenge( - &mut self, - label: &'static [u8], - dest: &mut [u8], - ) -> Result<(), Error> { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - self.transcript.challenge_bytes(label, dest); - self.append_message(label, dest)?; - Ok(()) - } -} - #[inline] #[cfg(test)] pub(crate) fn to_field(v: Vec) -> Vec { @@ -248,6 +164,8 @@ pub(crate) fn to_field(v: Vec) -> Vec { // TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. #[cfg(test)] use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +#[cfg(test)] +use ark_ff::PrimeField; #[cfg(test)] pub(crate) fn test_sponge() -> PoseidonSponge { From a4ed4e10cef99499d396d28386d138f26aa2365b Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Wed, 17 Jan 2024 13:50:44 +0100 Subject: [PATCH 43/75] Update a comment --- poly-commit/src/linear_codes/utils.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 2b1d8e73..df2dc1cc 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -16,7 +16,8 @@ use { digest::Digest, }; -/// This is CSC format https://shorturl.at/fpL17 +/// This is CSC format +/// https://en.wikipedia.org/wiki/Sparse_matrix#Compressed_sparse_column_(CSC_or_CCS) #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] #[derivative(Clone(bound = ""), Debug(bound = ""))] pub struct SprsMat { From 55d7b58375b163e655c7f9983c1e7450531ea486 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Thu, 18 Jan 2024 12:04:01 +0100 Subject: [PATCH 44/75] Delete `IOPTranscript`, update with master (#50) (aka Hyrax++) * Add the trait bounds * Add `CommitmentState` * Update benches for the new type * Fix the name of local variable * Merge `PCCommitmentState` with `PCRandomness` * Update `README.md` * Fix a bug * Change `Randomness` to `CommitmentState` * Maybe `empty` not return `Self` * Make `empty` return `Self` * Rename `rand` to `state` * Partially integrate the new design into Hyrax * Update Hyrax with the shared state * Rename nonnative to emulated, as in `r1cs-std` (#137) * Rename nonnative to emulated, as in `r1cs-std` * Run `fmt` * Temporarily change `Cargo.toml` * Revert `Cargo.toml` * Refactor `FoldedPolynomialStream` partially * Substitute `ChallengeGenerator` by the generic sponge (#139) * Rename nonnative to emulated, as in `r1cs-std` * Run `fmt` * Temporarily change `Cargo.toml` * Substitute `ChallengeGenerator` with the generic sponge * Run `fmt` * Remove the extra file * Update modules * Delete the unnecessary loop * Revert `Cargo.toml` * Refactor `FoldedPolynomialStream` partially * Update README * Make the diff more readable * Bring the whitespace back * Make diff more readable, 2 * Fix according to breaking changes in 
`ark-ec` (#141) * Fix for KZG10 * Fix the breaking changes in `ark-ec` * Remove the extra loop * Fix the loop range * re-use the preprocessing table * also re-use the preprocessing table for multilinear_pc --------- Co-authored-by: mmagician * Auxiliary opening data (#134) * Add the trait bounds * Add `CommitmentState` * Update benches for the new type * Fix the name of local variable * Merge `PCCommitmentState` with `PCRandomness` * Update `README.md` * Fix a bug * Put `Randomness` in `CommitmentState` * Add a comment * Remove the extra loop * Update the comment for `CommitmentState` Co-authored-by: Marcin * cargo fmt --------- Co-authored-by: Marcin * `batch_mul_with_preprocessing` no longer takes `self` as argument (#142) * batch_mul_with_preprocessing no longer takes `self` as argument * Apply suggestions from code review Co-authored-by: Pratyush Mishra * fix variable name --------- Co-authored-by: Pratyush Mishra * Remove ChallengeGenerator for Ligero (#56) * Squash and merge `delete-chalgen` onto here * Fix for `ChallengeGenerator` * Delete `IOPTranscript` for Hyrax (#55) * Use the sponge generic and rearrange `use`s * Use sponge instead of `IOPTransript` * Fix benches * Remove the extra loop --------- Co-authored-by: mmagician Co-authored-by: Pratyush Mishra --- README.md | 16 +- bench-templates/src/lib.rs | 22 +- poly-commit/benches/hyrax_times.rs | 3 +- poly-commit/src/challenge.rs | 61 -- poly-commit/src/constraints.rs | 20 +- poly-commit/src/data_structures.rs | 12 +- poly-commit/src/hyrax/data_structures.rs | 24 +- poly-commit/src/hyrax/mod.rs | 133 ++-- poly-commit/src/hyrax/tests.rs | 82 +- poly-commit/src/ipa_pc/data_structures.rs | 3 +- poly-commit/src/ipa_pc/mod.rs | 89 +-- poly-commit/src/kzg10/data_structures.rs | 3 +- poly-commit/src/kzg10/mod.rs | 49 +- poly-commit/src/lib.rs | 723 +++++++++--------- .../src/marlin/marlin_pc/data_structures.rs | 7 +- poly-commit/src/marlin/marlin_pc/mod.rs | 65 +- .../marlin/marlin_pst13_pc/data_structures.rs | 5 +- poly-commit/src/marlin/marlin_pst13_pc/mod.rs | 73 +- poly-commit/src/marlin/mod.rs | 41 +- poly-commit/src/multilinear_pc/mod.rs | 48 +- poly-commit/src/sonic_pc/mod.rs | 65 +- .../src/streaming_kzg/data_structures.rs | 9 +- poly-commit/src/streaming_kzg/time.rs | 10 +- poly-commit/src/utils.rs | 76 +- 24 files changed, 715 insertions(+), 924 deletions(-) delete mode 100644 poly-commit/src/challenge.rs diff --git a/README.md b/README.md index 9697bdb9..64c2ea2f 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ This trait defines the interface for a polynomial commitment scheme. It is recom // In this example, we will commit to a single polynomial, open it first at one point, and then batched at two points, and finally verify the proofs. // We will use the KZG10 polynomial commitment scheme, following the approach from Marlin. -use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations, challenge::ChallengeGenerator}; +use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations}; use ark_bls12_377::Bls12_377; use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; use ark_crypto_primitives::sponge::CryptographicSponge; @@ -128,17 +128,15 @@ let (ck, vk) = PCS::trim(&pp, degree, 2, Some(&[degree])).unwrap(); // 3. PolynomialCommitment::commit // The prover commits to the polynomial using their committer key `ck`. 
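// (Note, not part of the original example: this patch renames the second
// value returned by `commit` from `rands` to `states`; a `CommitmentState`
// carries whatever the scheme needs at opening time, randomness included.)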
-let (comms, rands) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - -let challenge_generator: ChallengeGenerator<::ScalarField, Sponge_Bls12_377> = ChallengeGenerator::new_univariate(&mut test_sponge); +let (comms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); // 4a. PolynomialCommitment::open // Opening proof at a single point. -let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (challenge_generator.clone()), &rands, None).unwrap(); +let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (test_sponge.clone()), &states, None).unwrap(); // 5a. PolynomialCommitment::check // Verifying the proof at a single point, given the commitment, the point, the claimed evaluation, and the proof. -assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (challenge_generator.clone()), Some(rng)).unwrap()); +assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (test_sponge.clone()), Some(rng)).unwrap()); let mut query_set = QuerySet::new(); let mut values = Evaluations::new(); @@ -155,8 +153,8 @@ let proof_batched = PCS::batch_open( [&labeled_poly], &comms, &query_set, - &mut (challenge_generator.clone()), - &rands, + &mut (test_sponge.clone()), + &states, Some(rng), ).unwrap(); @@ -167,7 +165,7 @@ assert!(PCS::batch_check( &query_set, &values, &proof_batched, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), rng, ).unwrap()); ``` diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 552f5924..8a656589 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -11,7 +11,7 @@ use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use core::time::Duration; use std::time::Instant; -use ark_poly_commit::{challenge::ChallengeGenerator, LabeledPolynomial, PolynomialCommitment}; +use ark_poly_commit::{LabeledPolynomial, PolynomialCommitment}; pub use criterion::*; pub use paste::paste; @@ -123,7 +123,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); let start = Instant::now(); @@ -132,8 +132,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -157,7 +157,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = P::Point::rand(rng); let proofs = PCS::open( @@ -165,8 +165,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -194,7 +194,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); let claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( @@ -202,8 +202,8 @@ where [&labeled_poly], &coms, &point, - &mut 
ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -215,7 +215,7 @@ where &point, [claimed_eval], &proof, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), None, ) .unwrap(); diff --git a/poly-commit/benches/hyrax_times.rs b/poly-commit/benches/hyrax_times.rs index 7f579cab..c76753df 100644 --- a/poly-commit/benches/hyrax_times.rs +++ b/poly-commit/benches/hyrax_times.rs @@ -1,3 +1,4 @@ +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_pcs_bench_templates::*; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -8,7 +9,7 @@ use ark_poly_commit::hyrax::HyraxPC; use rand_chacha::ChaCha20Rng; // Hyrax PCS over BN254 -type Hyrax254 = HyraxPC>; +type Hyrax254 = HyraxPC, PoseidonSponge>; fn rand_poly_hyrax( num_vars: usize, diff --git a/poly-commit/src/challenge.rs b/poly-commit/src/challenge.rs deleted file mode 100644 index 23b3c9d1..00000000 --- a/poly-commit/src/challenge.rs +++ /dev/null @@ -1,61 +0,0 @@ -use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; -use ark_ff::PrimeField; - -/// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. -/// For multivariate strategy, each challenge is freshly squeezed from a sponge. -/// For univariate strategy, each challenge is a power of one squeezed element from sponge. -/// -/// Note that mutable reference cannot be cloned. -#[derive(Clone)] -pub enum ChallengeGenerator { - /// Each challenge is freshly squeezed from a sponge. - Multivariate(S), - /// Each challenge is a power of one squeezed element from sponge. - /// - /// `Univariate(generator, next_element)` - Univariate(F, F), -} - -impl ChallengeGenerator { - /// Returns a challenge generator with multivariate strategy. Each challenge is freshly squeezed - /// from a sponge. - pub fn new_multivariate(sponge: S) -> Self { - Self::Multivariate(sponge) - } - - /// Returns a challenge generator with univariate strategy. Each challenge is a power of one - /// squeezed element from sponge. - pub fn new_univariate(sponge: &mut S) -> Self { - let gen = sponge.squeeze_field_elements(1)[0]; - Self::Univariate(gen, gen) - } - - /// Returns a challenge of size `size`. - /// * If `self == Self::Multivariate(...)`, then this squeezes out a challenge of size `size`. - /// * If `self == Self::Univariate(...)`, then this ignores the `size` argument and simply squeezes out - /// the next field element. - pub fn try_next_challenge_of_size(&mut self, size: FieldElementSize) -> F { - match self { - // multivariate (full) - Self::Multivariate(sponge) => sponge.squeeze_field_elements_with_sizes(&[size])[0], - // univariate - Self::Univariate(gen, next) => { - let result = next.clone(); - *next *= *gen; - result - } - } - } - /// Returns the next challenge generated. - pub fn next_challenge(&mut self) -> F { - self.try_next_challenge_of_size(FieldElementSize::Full) - } - - /// Returns the sponge state if `self` is multivariate. Returns `None` otherwise. 
- pub fn into_sponge(self) -> Option { - match self { - Self::Multivariate(s) => Some(s), - _ => None, - } - } -} diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index e6fb5d4f..1300509a 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -5,7 +5,7 @@ use crate::{ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::Polynomial; -use ark_r1cs_std::fields::nonnative::NonNativeFieldVar; +use ark_r1cs_std::fields::emulated_fp::EmulatedFpVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; @@ -24,8 +24,8 @@ pub enum LinearCombinationCoeffVar), + /// Other coefficient, represented as a "emulated" field element. + Var(EmulatedFpVar), } /// An allocated version of `LinearCombination`. @@ -60,7 +60,7 @@ impl let (f, lc_term) = term; let fg = - NonNativeFieldVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) + EmulatedFpVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) .unwrap(); (LinearCombinationCoeffVar::Var(fg), lc_term.clone()) @@ -79,12 +79,12 @@ impl pub struct PCCheckRandomDataVar { /// Opening challenges. /// The prover and the verifier MUST use the same opening challenges. - pub opening_challenges: Vec>, + pub opening_challenges: Vec>, /// Bit representations of the opening challenges. pub opening_challenges_bits: Vec>>, /// Batching random numbers. /// The verifier can choose these numbers freely, as long as they are random. - pub batching_rands: Vec>, + pub batching_rands: Vec>, /// Bit representations of the batching random numbers. pub batching_rands_bits: Vec>>, } @@ -172,7 +172,7 @@ pub struct LabeledPointVar { /// MUST be a unique identifier in a query set. pub name: String, /// The point value. - pub value: NonNativeFieldVar, + pub value: EmulatedFpVar, } /// An allocated version of `QuerySet`. @@ -184,7 +184,7 @@ pub struct QuerySetVar( /// An allocated version of `Evaluations`. #[derive(Clone)] pub struct EvaluationsVar( - pub HashMap, NonNativeFieldVar>, + pub HashMap, EmulatedFpVar>, ); impl EvaluationsVar { @@ -192,8 +192,8 @@ impl EvaluationsVar, - ) -> Result, SynthesisError> { + point: &EmulatedFpVar, + ) -> Result, SynthesisError> { let key = LabeledPointVar:: { name: String::from(lc_string), value: point.clone(), diff --git a/poly-commit/src/data_structures.rs b/poly-commit/src/data_structures.rs index 4a5eec21..2b942ee1 100644 --- a/poly-commit/src/data_structures.rs +++ b/poly-commit/src/data_structures.rs @@ -70,9 +70,12 @@ pub trait PCPreparedCommitment: Clone { fn prepare(comm: &UNPREPARED) -> Self; } -/// Defines the minimal interface of commitment randomness for any polynomial -/// commitment scheme. -pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { +/// Defines the minimal interface of commitment state for any polynomial +/// commitment scheme. It might be randomness etc. +pub trait PCCommitmentState: Clone + CanonicalSerialize + CanonicalDeserialize { + /// This is the type of `Randomness` that the `rand` method returns + type Randomness: Clone + CanonicalSerialize + CanonicalDeserialize; + /// Outputs empty randomness that does not hide the commitment. 
fn empty() -> Self; @@ -86,9 +89,8 @@ pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { has_degree_bound: bool, num_vars: Option, rng: &mut R, - ) -> Self; + ) -> Self::Randomness; } - /// A proof of satisfaction of linear combinations. #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] pub struct BatchLCProof { diff --git a/poly-commit/src/hyrax/data_structures.rs b/poly-commit/src/hyrax/data_structures.rs index fbdd69a9..aa58b7cf 100644 --- a/poly-commit/src/hyrax/data_structures.rs +++ b/poly-commit/src/hyrax/data_structures.rs @@ -3,7 +3,10 @@ use ark_ff::PrimeField; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::{rand::RngCore, vec::Vec}; -use crate::{PCCommitment, PCCommitterKey, PCRandomness, PCUniversalParams, PCVerifierKey}; +use crate::{ + utils::Matrix, PCCommitment, PCCommitmentState, PCCommitterKey, PCUniversalParams, + PCVerifierKey, +}; /// `UniversalParams` amounts to a Pedersen commitment key of sufficient length #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] @@ -77,9 +80,24 @@ impl PCCommitment for HyraxCommitment { pub(crate) type HyraxRandomness = Vec; +/// Auxiliary state produced by the Hyrax `commit` phase: the matrix of +/// coefficients from which the commitment was computed, together with one +/// hiding scalar per row commitment. It is kept by the committer and reused +/// during `open`; it is not to be shared with the verifier. +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct HyraxCommitmentState +where + F: PrimeField, +{ + pub(crate) randomness: HyraxRandomness, + pub(crate) mat: Matrix, +} + /// A vector of scalars, each of which multiplies the distinguished group /// element in the Pedersen commitment key for a different commitment -impl PCRandomness for HyraxRandomness { +impl PCCommitmentState for HyraxCommitmentState { + type Randomness = HyraxRandomness; fn empty() -> Self { unimplemented!() } @@ -89,7 +107,7 @@ impl PCRandomness for HyraxRandomness { _has_degree_bound: bool, _num_vars: Option, rng: &mut R, - ) -> Self { + ) -> Self::Randomness { (0..num_queries).map(|_| F::rand(rng)).collect() } } diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 67937470..d5536137 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -1,30 +1,26 @@ -mod data_structures; -mod utils; -pub use data_structures::*; - -#[cfg(test)] -mod tests; - -use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +use crate::hyrax::utils::tensor_prime; +use crate::to_bytes; +use crate::utils::{inner_product, scalar_by_vector, vector_sum, Matrix}; +use crate::{ + hyrax::utils::flat_to_matrix_column_major, Error, LabeledCommitment, LabeledPolynomial, + PolynomialCommitment, +}; +use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::PrimeField; use ark_poly::MultilinearExtension; -use ark_std::{rand::RngCore, string::ToString, vec::Vec, UniformRand}; +use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand}; use blake2::Blake2s256; -use core::marker::PhantomData; use digest::Digest; #[cfg(feature = "parallel")] use rayon::prelude::*; -use crate::hyrax::utils::tensor_prime; -use crate::utils::{inner_product, scalar_by_vector, vector_sum, IOPTranscript, Matrix}; - -use crate::{ - challenge::ChallengeGenerator, hyrax::utils::flat_to_matrix_column_major, Error, - LabeledCommitment, LabeledPolynomial, PolynomialCommitment, -}; - +mod data_structures; +pub use data_structures::*; +#[cfg(test)] +mod tests; +mod
utils; /// String of bytes used to seed the randomness during the setup function. /// Note that the latter should never be used in production environments. pub const PROTOCOL_NAME: &'static [u8] = b"Hyrax protocol"; @@ -70,11 +66,18 @@ pub struct HyraxPC< G: AffineRepr, // A polynomial type representing multilinear polynomials P: MultilinearExtension, + // The sponge used in the protocol as random oracle + S: CryptographicSponge, > { - _phantom: PhantomData<(G, P)>, + _phantom: PhantomData<(G, P, S)>, } -impl> HyraxPC { +impl HyraxPC +where + G: AffineRepr, + P: MultilinearExtension, + S: CryptographicSponge, +{ /// Pedersen commitment to a vector of scalars as described in appendix A.1 /// of the reference article. /// The caller must either directly pass hiding exponent `r` inside Some, @@ -116,19 +119,18 @@ impl> HyraxPC { } } -impl> - PolynomialCommitment< - G::ScalarField, - P, - // Dummy sponge - required by the trait, not used in this implementation - PoseidonSponge, - > for HyraxPC +impl PolynomialCommitment for HyraxPC +where + G: AffineRepr, + G::ScalarField: Absorb, + P: MultilinearExtension, + S: CryptographicSponge, { type UniversalParams = HyraxUniversalParams; type CommitterKey = HyraxCommitterKey; type VerifierKey = HyraxVerifierKey; type Commitment = HyraxCommitment; - type Randomness = HyraxRandomness; + type CommitmentState = HyraxCommitmentState; type Proof = Vec>; type BatchProof = Vec; type Error = Error; @@ -222,7 +224,7 @@ impl> ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -230,7 +232,7 @@ impl> P: 'a, { let mut coms = Vec::new(); - let mut rands = Vec::new(); + let mut states = Vec::new(); #[cfg(not(feature = "parallel"))] let rng_inner = rng.expect("Committing to polynomials requires a random generator"); @@ -270,10 +272,13 @@ impl> let l_comm = LabeledCommitment::new(label.to_string(), com, Some(1)); coms.push(l_comm); - rands.push(com_rands); + states.push(HyraxCommitmentState { + randomness: com_rands, + mat: Matrix::new_from_rows(m), + }); } - Ok((coms, rands)) + Ok((coms, states)) } /// Opens a list of polynomial commitments at a desired point. This @@ -292,25 +297,18 @@ impl> /// polynomial. /// - The number of variables of a polynomial doesn't match that of the /// point. 
- /// /// # Disregarded arguments /// - `opening_challenges` fn open<'a>( ck: &Self::CommitterKey, labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - // Not used and not generic on the cryptographic sponge S - _opening_challenges: &mut ChallengeGenerator< - G::ScalarField, - PoseidonSponge, - >, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, P: 'a, { let n = point.len(); @@ -339,9 +337,9 @@ impl> let rng_inner = rng.expect("Opening polynomials requires randomness"); - for (l_poly, (l_com, randomness)) in labeled_polynomials + for (l_poly, (l_com, state)) in labeled_polynomials .into_iter() - .zip(commitments.into_iter().zip(rands.into_iter())) + .zip(commitments.into_iter().zip(states.into_iter())) { let label = l_poly.label(); if label != l_com.label() { @@ -361,28 +359,24 @@ impl> }); } - // Initialising the transcript - let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); - // Absorbing public parameters - transcript.append_serializable_element(b"public parameters", ck)?; + sponge.absorb(&to_bytes!(ck).map_err(|_| Error::TranscriptError)?); // Absorbing the commitment to the polynomial - transcript.append_serializable_element(b"commitment", &com.row_coms)?; + sponge.absorb(&to_bytes!(&com.row_coms).map_err(|_| Error::TranscriptError)?); // Absorbing the point - transcript.append_serializable_element(b"point", point)?; + sponge.absorb(point); // Committing to the matrix formed by the polynomial coefficients - let t_aux = flat_to_matrix_column_major(&poly.to_evaluations(), dim, dim); - let t = Matrix::new_from_rows(t_aux); + let t = &state.mat; let lt = t.row_mul(&l); // t_prime coincides with the Pedersen commitment to lt with the // randomness r_lt computed here let r_lt = cfg_iter!(l) - .zip(cfg_iter!(randomness)) + .zip(cfg_iter!(state.randomness)) .map(|(l, r)| *l * r) .sum::(); @@ -406,15 +400,15 @@ impl> let (com_b, r_b) = Self::pedersen_commit(ck, &[b], None, Some(rng_inner)); // Absorbing the commitment to the evaluation - transcript.append_serializable_element(b"com_eval", &com_eval)?; + sponge.absorb(&to_bytes!(&com_eval).map_err(|_| Error::TranscriptError)?); // Absorbing the two auxiliary commitments - transcript.append_serializable_element(b"com_d", &com_d)?; - transcript.append_serializable_element(b"com_b", &com_b)?; + sponge.absorb(&to_bytes!(&com_d).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&to_bytes!(&com_b).map_err(|_| Error::TranscriptError)?); // Receive the random challenge c from the verifier, i.e. squeeze // it from the transcript. - let c = transcript.get_and_append_challenge(b"c").unwrap(); + let c = sponge.squeeze_field_elements(1)[0]; let z = vector_sum(&d, &scalar_by_vector(c, &lt)); let z_d = c * r_lt + r_d; @@ -442,7 +436,6 @@ impl> /// point (specifically, commitment length should be 2^(point-length/2)).
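The hunks above replace the hand-rolled `IOPTranscript` with the caller-supplied `CryptographicSponge`: every public value is serialized with the crate's `to_bytes!` macro, absorbed, and the challenge is squeezed as a field element. A minimal sketch of this absorb-then-squeeze pattern, assuming only the items visible in the hunk (`to_bytes!`, `Error::TranscriptError`); the helper `fs_challenge` itself is illustrative and not part of the crate:

```rust
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ff::PrimeField;
use ark_serialize::CanonicalSerialize;

use crate::{to_bytes, Error};

// Illustrative helper: absorb one prover message, squeeze one challenge.
// `open` and `check` absorb the same sequence (key, row commitments, point,
// `com_eval`, `com_d`, `com_b`), so both derive the same challenge `c`.
fn fs_challenge<F: PrimeField, S: CryptographicSponge>(
    sponge: &mut S,
    msg: &impl CanonicalSerialize,
) -> Result<F, Error> {
    // Serialize the message canonically and absorb its bytes.
    sponge.absorb(&to_bytes!(msg).map_err(|_| Error::TranscriptError)?);
    // Squeeze one full field element as the verifier challenge.
    Ok(sponge.squeeze_field_elements(1)[0])
}
```

Since both parties drive the sponge identically, soundness reduces to the usual Fiat-Shamir argument, with no separate transcript object to keep in sync.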
/// /// # Disregarded arguments - /// - `opening_challenges` /// - `rng` fn check<'a>( vk: &Self::VerifierKey, @@ -450,11 +443,7 @@ impl> point: &'a P::Point, _values: impl IntoIterator, proof: &Self::Proof, - // Not used and not generic on the cryptographic sponge S - _opening_challenges: &mut ChallengeGenerator< - G::ScalarField, - PoseidonSponge, - >, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -506,29 +495,25 @@ impl> .collect::>(); let t_prime: G = ::msm_bigint(row_coms, &l_bigint).into(); - // Construct transcript and squeeze the challenge c from it - - let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); - // Absorbing public parameters - transcript.append_serializable_element(b"public parameters", vk)?; + sponge.absorb(&to_bytes!(vk).map_err(|_| Error::TranscriptError)?); // Absorbing the commitment to the polynomial - transcript.append_serializable_element(b"commitment", row_coms)?; + sponge.absorb(&to_bytes!(row_coms).map_err(|_| Error::TranscriptError)?); // Absorbing the point - transcript.append_serializable_element(b"point", point)?; + sponge.absorb(point); // Absorbing the commitment to the evaluation - transcript.append_serializable_element(b"com_eval", com_eval)?; + sponge.absorb(&to_bytes!(com_eval).map_err(|_| Error::TranscriptError)?); // Absorbing the two auxiliary commitments - transcript.append_serializable_element(b"com_d", com_d)?; - transcript.append_serializable_element(b"com_b", com_b)?; + sponge.absorb(&to_bytes!(com_d).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&to_bytes!(com_b).map_err(|_| Error::TranscriptError)?); // Receive the random challenge c from the verifier, i.e. squeeze // it from the transcript. - let c = transcript.get_and_append_challenge(b"c").unwrap(); + let c: G::ScalarField = sponge.squeeze_field_elements(1)[0]; // First check let com_z_zd = Self::pedersen_commit(vk, z, Some(*z_d), None).0; diff --git a/poly-commit/src/hyrax/tests.rs b/poly-commit/src/hyrax/tests.rs index f471b49f..713dd7f3 100644 --- a/poly-commit/src/hyrax/tests.rs +++ b/poly-commit/src/hyrax/tests.rs @@ -1,3 +1,7 @@ +use crate::hyrax::HyraxPC; +use crate::tests::*; +use crate::utils::test_sponge; +use crate::{LabeledPolynomial, PolynomialCommitment}; use ark_bls12_377::G1Affine; use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_ec::AffineRepr; @@ -7,24 +11,16 @@ use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; use ark_std::test_rng; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; -use crate::challenge::ChallengeGenerator; -use crate::hyrax::HyraxPC; - -use crate::utils::test_sponge; -use crate::{LabeledPolynomial, PolynomialCommitment}; - -use crate::tests::*; - // The test structure is largely taken from the multilinear_ligero module // inside this crate // ****************** types ****************** -type Fr = ::ScalarField; -type Hyrax381 = HyraxPC>; - type Fq = ::ScalarField; -type Hyrax377 = HyraxPC>; +type Hyrax377 = HyraxPC, PoseidonSponge>; + +type Fr = ::ScalarField; +type Hyrax381 = HyraxPC, PoseidonSponge>; // ******** auxiliary test functions ******** @@ -84,15 +80,13 @@ fn test_hyrax_construction() { // Dummy argument let mut test_sponge = test_sponge::(); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); let proof = Hyrax381::open( &ck, &[l_poly], &c, &point, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), &rands, Some(chacha), ) @@ -104,7 +98,7 @@ fn test_hyrax_construction() { 
&point, [value], &proof, - &mut challenge_generator, + &mut test_sponge, Some(chacha), ) .unwrap()); @@ -112,35 +106,35 @@ fn test_hyrax_construction() { #[test] fn hyrax_single_poly_test() { - single_poly_test::<_, _, Hyrax381, _>( + single_poly_test::<_, _, Hyrax377, _>( Some(10), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) - .expect("test failed for bls12-381"); - single_poly_test::<_, _, Hyrax377, _>( + .expect("test failed for bls12-377"); + single_poly_test::<_, _, Hyrax381, _>( Some(10), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) - .expect("test failed for bls12-377"); + .expect("test failed for bls12-381"); } #[test] fn hyrax_constant_poly_test() { single_poly_test::<_, _, Hyrax377, _>( Some(0), - constant_poly::, - rand_point::, + constant_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-377"); single_poly_test::<_, _, Hyrax381, _>( Some(0), - constant_poly::, - rand_point::, + constant_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-381"); @@ -150,15 +144,15 @@ fn hyrax_constant_poly_test() { fn hyrax_full_end_to_end_test() { full_end_to_end_test::<_, _, Hyrax377, _>( Some(8), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-377"); full_end_to_end_test::<_, _, Hyrax381, _>( Some(10), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-381"); @@ -168,15 +162,15 @@ fn hyrax_full_end_to_end_test() { fn hyrax_single_equation_test() { single_equation_test::<_, _, Hyrax377, _>( Some(6), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-377"); single_equation_test::<_, _, Hyrax381, _>( Some(6), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-381"); @@ -186,15 +180,15 @@ fn hyrax_single_equation_test() { fn hyrax_two_equation_test() { two_equation_test::<_, _, Hyrax377, _>( Some(10), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-377"); two_equation_test::<_, _, Hyrax381, _>( Some(10), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-381"); @@ -204,15 +198,15 @@ fn hyrax_two_equation_test() { fn hyrax_full_end_to_end_equation_test() { full_end_to_end_equation_test::<_, _, Hyrax377, _>( Some(8), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-377"); full_end_to_end_equation_test::<_, _, Hyrax381, _>( Some(8), - rand_poly::, - rand_point::, + rand_poly, + rand_point, poseidon_sponge_for_test, ) .expect("test failed for bls12-381"); diff --git a/poly-commit/src/ipa_pc/data_structures.rs b/poly-commit/src/ipa_pc/data_structures.rs index 7ba56c95..84fcb7f2 100644 --- a/poly-commit/src/ipa_pc/data_structures.rs +++ b/poly-commit/src/ipa_pc/data_structures.rs @@ -146,7 +146,8 @@ pub struct Randomness { pub shifted_rand: Option, } -impl PCRandomness for Randomness { +impl PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: G::ScalarField::zero(), diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index 25752d78..43a40852 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -1,7 +1,7 @@ use 
crate::{BTreeMap, BTreeSet, String, ToString, Vec, CHALLENGE_SIZE}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCCommitterKey, PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCCommitterKey, PCUniversalParams, PolynomialCommitment}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; @@ -15,7 +15,6 @@ pub use data_structures::*; #[cfg(feature = "parallel")] use rayon::prelude::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; use digest::Digest; @@ -105,7 +104,7 @@ where point: G::ScalarField, values: impl IntoIterator, proof: &Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, ) -> Option> { let check_time = start_timer!(|| "Succinct checking"); @@ -117,7 +116,8 @@ where let mut combined_commitment_proj = G::Group::zero(); let mut combined_v = G::ScalarField::zero(); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge: G::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let labeled_commitments = commitments.into_iter(); let values = values.into_iter(); @@ -126,7 +126,7 @@ where let commitment = labeled_commitment.commitment(); combined_v += &(cur_challenge * &value); combined_commitment_proj += &labeled_commitment.commitment().comm.mul(cur_challenge); - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let degree_bound = labeled_commitment.degree_bound(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); @@ -137,7 +137,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let mut combined_commitment = combined_commitment_proj.into_affine(); @@ -347,7 +347,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -418,7 +418,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -427,7 +427,7 @@ where { let rng = &mut crate::optional_rng::OptionalRng(rng); let mut comms = Vec::new(); - let mut rands = Vec::new(); + let mut states = Vec::new(); let commit_time = start_timer!(|| "Committing to polynomials"); for labeled_polynomial in polynomials { @@ -446,7 +446,7 @@ where hiding_bound, )); - let randomness = if let Some(h) = hiding_bound { + let state = if let Some(h) = hiding_bound { Randomness::rand(h, degree_bound.is_some(), None, rng) } else { Randomness::empty() @@ -456,7 +456,7 @@ where &ck.comm_key[..(polynomial.degree() + 1)], &polynomial.coeffs(), Some(ck.s), - Some(randomness.rand), + Some(state.rand), ) .into(); @@ -465,7 +465,7 @@ where &ck.comm_key[(ck.supported_degree() - d)..], &polynomial.coeffs(), Some(ck.s), - randomness.shifted_rand, + state.shifted_rand, ) .into() }); @@ -474,13 +474,13 @@ where let labeled_comm = LabeledCommitment::new(label.to_string(), commitment, degree_bound); comms.push(labeled_comm); - rands.push(randomness); + states.push(state); 
end_timer!(commit_time); } end_timer!(commit_time); - Ok((comms, rands)) + Ok((comms, states)) } fn open<'a>( @@ -488,13 +488,13 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, P: 'a, { let mut combined_polynomial = P::zero(); @@ -504,15 +504,15 @@ where let mut has_hiding = false; let polys_iter = labeled_polynomials.into_iter(); - let rands_iter = rands.into_iter(); + let states_iter = states.into_iter(); let comms_iter = commitments.into_iter(); let combine_time = start_timer!(|| "Combining polynomials, randomness, and commitments."); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (labeled_polynomial, (labeled_commitment, randomness)) in - polys_iter.zip(comms_iter.zip(rands_iter)) + for (labeled_polynomial, (labeled_commitment, state)) in + polys_iter.zip(comms_iter.zip(states_iter)) { let label = labeled_polynomial.label(); assert_eq!(labeled_polynomial.label(), labeled_commitment.label()); @@ -528,10 +528,10 @@ where if hiding_bound.is_some() { has_hiding = true; - combined_rand += &(cur_challenge * &randomness.rand); + combined_rand += &(cur_challenge * &state.rand); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let has_degree_bound = degree_bound.is_some(); @@ -554,7 +554,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); if hiding_bound.is_some() { - let shifted_rand = randomness.shifted_rand; + let shifted_rand = state.shifted_rand; assert!( shifted_rand.is_some(), "shifted_rand.is_none() for {}", @@ -564,7 +564,7 @@ where } } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } end_timer!(combine_time); @@ -739,7 +739,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -762,8 +762,7 @@ where )); } - let check_poly = - Self::succinct_check(vk, commitments, *point, values, proof, opening_challenges); + let check_poly = Self::succinct_check(vk, commitments, *point, values, proof, sponge); if check_poly.is_none() { return Ok(false); @@ -790,7 +789,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -833,14 +832,8 @@ where vals.push(*v_i); } - let check_poly = Self::succinct_check( - vk, - comms.into_iter(), - *point, - vals.into_iter(), - p, - opening_challenges, - ); + let check_poly = + Self::succinct_check(vk, comms.into_iter(), *point, vals.into_iter(), p, sponge); if check_poly.is_none() { return Ok(false); @@ -876,24 +869,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - 
Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_poly_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -951,7 +944,7 @@ where let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(Randomness { + lc_states.push(Randomness { rand: combined_rand, shifted_rand: combined_shifted_rand, }); @@ -971,8 +964,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -987,7 +980,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -1060,7 +1053,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/kzg10/data_structures.rs b/poly-commit/src/kzg10/data_structures.rs index 60626e70..d648f19f 100644 --- a/poly-commit/src/kzg10/data_structures.rs +++ b/poly-commit/src/kzg10/data_structures.rs @@ -420,7 +420,8 @@ impl> Randomness { } } -impl> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/kzg10/mod.rs b/poly-commit/src/kzg10/mod.rs index a6ea5752..508db2cb 100644 --- a/poly-commit/src/kzg10/mod.rs +++ b/poly-commit/src/kzg10/mod.rs @@ -5,10 +5,10 @@ //! proposed by Kate, Zaverucha, and Goldberg ([KZG10](http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf)). //! This construction achieves extractability in the algebraic group model (AGM). 
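The setup hunk below drops the manual `FixedBase` window tables in favour of `ScalarMul::batch_mul`, which multiplies a single base by a slice of scalars and returns the results already normalized to affine form. A hedged sketch of the power-generation step under that API; the helper name and its exact bounds are illustrative, while the call shape is the one the hunk uses:

```rust
use ark_ec::{pairing::Pairing, scalar_mul::ScalarMul};
use ark_ff::One;

// Illustrative helper: compute [g, beta*g, ..., beta^n * g] in affine form
// with a single `batch_mul` call, replacing the old window-table setup
// followed by `normalize_batch`.
fn powers_of_beta_g<E: Pairing>(
    g: E::G1,
    beta: E::ScalarField,
    n: usize,
) -> Vec<E::G1Affine> {
    // powers = [1, beta, ..., beta^n]
    let mut powers = Vec::with_capacity(n + 1);
    let mut cur = E::ScalarField::one();
    for _ in 0..=n {
        powers.push(cur);
        cur *= &beta;
    }
    // One fixed base, many scalars; `batch_mul` also batch-normalizes.
    g.batch_mul(&powers)
}
```

Note how the hunk keeps `powers_of_beta` one entry longer than `powers_of_g` needs: `powers_of_gamma_g` consumes the whole slice, which supplies the extra power of beta that the deleted explicit `push` used to add.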
-use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec}; +use crate::{BTreeMap, Error, LabeledPolynomial, PCCommitmentState, ToString, Vec}; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; @@ -66,36 +66,27 @@ where let gamma_g = E::G1::rand(rng); let h = E::G2::rand(rng); + // powers_of_beta = [1, b, ..., b^(max_degree + 1)], len = max_degree + 2 let mut powers_of_beta = vec![E::ScalarField::one()]; - let mut cur = beta; - for _ in 0..max_degree { + for _ in 0..=max_degree { powers_of_beta.push(cur); cur *= &beta; } - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); + let powers_of_g = g.batch_mul(&powers_of_beta[0..max_degree + 1]); end_timer!(g_time); - let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); - let mut powers_of_gamma_g = - FixedBase::msm::(scalar_bits, window_size, &gamma_g_table, &powers_of_beta); - // Add an additional power of gamma_g, because we want to be able to support - // up to D queries. - powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta)); - end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); - let powers_of_gamma_g = E::G1::normalize_batch(&powers_of_gamma_g) + // Use the entire `powers_of_beta`, since we want to be able to support + // up to D queries. + let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); + let powers_of_gamma_g = gamma_g + .batch_mul(&powers_of_beta) .into_iter() .enumerate() .collect(); + end_timer!(gamma_g_time); let neg_powers_of_h_time = start_timer!(|| "Generating negative powers of h in G2"); let neg_powers_of_h = if produce_g2_powers { @@ -106,20 +97,10 @@ where cur /= &beta; } - let neg_h_table = FixedBase::get_window_table(scalar_bits, window_size, h); - let neg_powers_of_h = FixedBase::msm::( - scalar_bits, - window_size, - &neg_h_table, - &neg_powers_of_beta, - ); - - let affines = E::G2::normalize_batch(&neg_powers_of_h); - let mut affines_map = BTreeMap::new(); - affines.into_iter().enumerate().for_each(|(i, a)| { - affines_map.insert(i, a); - }); - affines_map + h.batch_mul(&neg_powers_of_beta) + .into_iter() + .enumerate() + .collect() } else { BTreeMap::new() }; diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index 08f08b72..0e1587ee 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -9,7 +9,7 @@ #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] -#![doc = include_str!("../README.md")] +#![doc = include_str!("../../README.md")] #[allow(unused)] #[macro_use] @@ -101,8 +101,6 @@ pub mod sonic_pc; /// [pcdas]: https://eprint.iacr.org/2020/499 pub mod ipa_pc; -/// Defines the challenge strategies and challenge generator.
-pub mod challenge; /// A multilinear polynomial commitment scheme that converts n-variate multilinear polynomial into /// n quotient UV polynomial. This scheme is based on hardness of the discrete logarithm /// in prime-order groups. Construction is detailed in [[XZZPD19]][xzzpd19] and [[ZGKPP18]][zgkpp18] @@ -111,7 +109,6 @@ pub mod challenge; /// [zgkpp]: https://ieeexplore.ieee.org/document/8418645 pub mod multilinear_pc; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// Multivariate polynomial commitment based on the construction in /// [[PST13]][pst] with batching and (optional) hiding property inspired @@ -169,8 +166,11 @@ pub trait PolynomialCommitment, S: Cryptographic type VerifierKey: PCVerifierKey; /// The commitment to a polynomial. type Commitment: PCCommitment + Default; - /// The commitment randomness. - type Randomness: PCRandomness; + /// Auxiliary state of the commitment, output by the `commit` phase. + /// It contains information that can be reused by the committer + /// during the `open` phase, such as the commitment randomness. + /// Not to be shared with the verifier. + type CommitmentState: PCCommitmentState; /// The evaluation proof for a single point. type Proof: Clone; /// The evaluation proof for a query set. @@ -216,7 +216,7 @@ pub trait PolynomialCommitment, S: Cryptographic ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -229,13 +229,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a; /// check but with individual challenges @@ -245,7 +245,7 @@ pub trait PolynomialCommitment, S: Cryptographic point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: Option<&mut dyn RngCore>, ) -> Result where @@ -265,13 +265,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // The default implementation achieves proceeds by rearranging the queries in @@ -279,16 +279,16 @@ pub trait PolynomialCommitment, S: Cryptographic // the same point, then opening their commitments simultaneously with a // single call to `open` (per point) let rng = &mut crate::optional_rng::OptionalRng(rng); - let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials + let poly_st_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) - .map(|((poly, r), comm)| (poly.label(), (poly, r, comm))) + .map(|((poly, st), comm)| (poly.label(), (poly, st, comm))) .collect(); let open_time = start_timer!(|| format!( "Opening {} polynomials at query set of size {}", - poly_rand_comm.len(), + poly_st_comm.len(), query_set.len(), )); @@ -311,20 +311,20 @@ pub trait PolynomialCommitment, S: Cryptographic let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { 
let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); // Constructing matching vectors with the polynomial, commitment // randomness and actual commitment for each polynomial being // queried at `point` for label in labels { - let (polynomial, rand, comm) = - poly_rand_comm.get(label).ok_or(Error::MissingPolynomial { + let (polynomial, state, comm) = + poly_st_comm.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(state); query_comms.push(comm); } @@ -337,8 +337,8 @@ pub trait PolynomialCommitment, S: Cryptographic query_polys, query_comms, &point, - challenge_generator, - query_rands, + sponge, + query_states, Some(rng), )?; @@ -370,7 +370,7 @@ pub trait PolynomialCommitment, S: Cryptographic query_set: &QuerySet, evaluations: &Evaluations, proof: &Self::BatchProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -428,15 +428,7 @@ pub trait PolynomialCommitment, S: Cryptographic // Verify all proofs referring to the current point simultaneously // with a single call to `check` - result &= Self::check( - vk, - comms, - &point, - values, - &proof, - challenge_generator, - Some(rng), - )?; + result &= Self::check(vk, comms, &point, values, &proof, sponge, Some(rng))?; end_timer!(proof_time); } Ok(result) @@ -450,12 +442,12 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { @@ -476,8 +468,8 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials, commitments, &poly_query_set, - challenge_generator, - rands, + sponge, + states, rng, )?; Ok(BatchLCProof { @@ -495,7 +487,7 @@ pub trait PolynomialCommitment, S: Cryptographic eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -566,7 +558,7 @@ pub trait PolynomialCommitment, S: Cryptographic &poly_query_set, &poly_evals, proof, - challenge_generator, + sponge, rng, )?; if !pc_result { @@ -678,88 +670,83 @@ pub mod tests { PC: PolynomialCommitment, S: CryptographicSponge, { - let challenge_generators = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; - - for challenge_gen in challenge_generators { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - let max_degree = 100; - let pp = PC::setup(max_degree, None, rng)?; - for _ in 0..10 { - let supported_degree = Uniform::from(1..=max_degree).sample(rng); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - - let mut labels = Vec::new(); - let mut polynomials = Vec::new(); - let mut degree_bounds = Vec::new(); - - for i in 0..10 { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree_bound = 1usize; - let hiding_bound = Some(1); - degree_bounds.push(degree_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(supported_degree, None, rng), - Some(degree_bound), - hiding_bound, - )); - } + let sponge = sponge(); + + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let max_degree = 100; + let pp = PC::setup(max_degree, None, rng)?; + for _ in 0..10 { + let supported_degree = Uniform::from(1..=max_degree).sample(rng); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + + let mut labels = Vec::new(); + let mut polynomials = Vec::new(); + let mut degree_bounds = Vec::new(); + + for i in 0..10 { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree_bound = 1usize; + let hiding_bound = Some(1); + degree_bounds.push(degree_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(supported_degree, None, rng), + Some(degree_bound), + hiding_bound, + )); + } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - Some(degree_bounds.as_slice()), - )?; - println!("Trimmed"); + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); + println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + Some(degree_bounds.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - let point = rand_point(None, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + let point = rand_point(None, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } Ok(()) @@ -786,127 +773,123 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make 
the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials: Vec> = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials: Vec> = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; - polynomials.push(LabeledPolynomial::new( - label, - rand_poly(degree, num_vars, rng).into(), - degree_bound, - hiding_bound, - )) - } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng).into(), + degree_bound, + hiding_bound, + )) + } + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); 
+ println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - // Construct query set - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for _ in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } + // Construct query set + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for _ in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); - } + } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + num_polynomials, num_points_in_query_set + ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); } - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + Ok(()) } @@ -931,167 +914,163 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + 
supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + if rng.gen() { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) + } else { + None + } } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - if rng.gen() { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; + println!("Hiding bound: {:?}", hiding_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng), + degree_bound, + hiding_bound, + )) + } + println!("supported degree: {:?}", supported_degree); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + println!("{:?}", degree_bounds); + println!("{}", num_polynomials); + println!("{}", enforce_degree_bounds); + + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_degree, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); + + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + + // Let's construct our equations + let mut linear_combinations = Vec::new(); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for i in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for j in 0..num_equations.unwrap() { + let label = format!("query {} eqn {}", i, j); + let mut lc = LinearCombination::empty(label.clone()); + + let mut value = F::zero(); + let should_have_degree_bounds: bool = rng.gen(); + for (k, label) in labels.iter().enumerate() { + if should_have_degree_bounds { + value += &polynomials[k].evaluate(&point); + lc.push((F::one(), label.to_string().into())); + break; } else { - None - } - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; - println!("Hiding bound: {:?}", hiding_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(degree, num_vars, rng), - degree_bound, - hiding_bound, - )) - } - println!("supported degree: {:?}", supported_degree); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - println!("{:?}", degree_bounds); - println!("{}", num_polynomials); - println!("{}", enforce_degree_bounds); - - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_degree, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); - - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - - // Let's construct our equations - let mut linear_combinations = Vec::new(); - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for i in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for j in 0..num_equations.unwrap() { - let label = format!("query {} eqn {}", i, j); - let mut lc = LinearCombination::empty(label.clone()); - - let mut value = F::zero(); - let should_have_degree_bounds: bool = rng.gen(); - for (k, label) in labels.iter().enumerate() { - if should_have_degree_bounds { - value += &polynomials[k].evaluate(&point); - lc.push((F::one(), label.to_string().into())); - break; + let poly = &polynomials[k]; + if poly.degree_bound().is_some() { + continue; } else { - let poly = &polynomials[k]; - if poly.degree_bound().is_some() { - continue; - } else { - assert!(poly.degree_bound().is_none()); - let coeff = F::rand(rng); - value += &(coeff * poly.evaluate(&point)); - lc.push((coeff, label.to_string().into())); - } + assert!(poly.degree_bound().is_none()); + let coeff = F::rand(rng); + value += &(coeff * poly.evaluate(&point)); + lc.push((coeff, label.to_string().into())); } } - values.insert((label.clone(), point.clone()), value); - if !lc.is_empty() { - linear_combinations.push(lc); - // Insert query - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - } } - } - if linear_combinations.is_empty() { - continue; - } - println!("Generated query set"); - println!("Linear combinations: {:?}", linear_combinations); - - let proof = PC::open_combinations( - &ck, - &linear_combinations, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - println!("Generated proof"); - let result = PC::check_combinations( - &vk, - &linear_combinations, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); + values.insert((label.clone(), point.clone()), value); + if !lc.is_empty() { + linear_combinations.push(lc); + // Insert query + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); } } - assert!( - result, - "proof was incorrect, equations: {:#?}", - linear_combinations + } + if linear_combinations.is_empty() { + continue; + } + println!("Generated query set"); + println!("Linear combinations: {:?}", linear_combinations); + + let proof = PC::open_combinations( + &ck, + &linear_combinations, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + println!("Generated proof"); + let result = PC::check_combinations( + &vk, + &linear_combinations, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + 
num_polynomials, num_points_in_query_set ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); + } } + assert!( + result, + "proof was incorrect, equations: {:#?}", + linear_combinations + ); } + Ok(()) } diff --git a/poly-commit/src/marlin/marlin_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pc/data_structures.rs index 2b09e03a..203e3201 100644 --- a/poly-commit/src/marlin/marlin_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{ - DenseUVPolynomial, PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, - PCRandomness, PCVerifierKey, Vec, + DenseUVPolynomial, PCCommitment, PCCommitmentState, PCCommitterKey, PCPreparedCommitment, + PCPreparedVerifierKey, PCVerifierKey, Vec, }; use ark_ec::pairing::Pairing; use ark_ec::AdditiveGroup; @@ -360,7 +360,8 @@ impl<'a, F: PrimeField, P: DenseUVPolynomial> AddAssign<(F, &'a Randomness> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: kzg10::Randomness::empty(), diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 39c4e362..7fbfba07 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, marlin::Marlin, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, ToString, Vec}; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Div, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -66,7 +65,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -180,7 +179,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -191,7 +190,7 @@ where let commit_time = start_timer!(|| "Committing to polynomials"); let mut commitments = Vec::new(); - let mut randomness = Vec::new(); + let mut states = Vec::new(); for p in polynomials { let label = p.label(); @@ -232,17 +231,17 @@ where }; let comm = Commitment { comm, shifted_comm }; - let rand = Randomness { rand, shifted_rand }; + let state = Randomness { rand, shifted_rand }; commitments.push(LabeledCommitment::new( label.to_string(), comm, degree_bound, )); - randomness.push(rand); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((commitments, randomness)) + Ok((commitments, states)) } /// On input a polynomial `p` and a point `point`, outputs a proof for the same. 
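A note on the recurring API change in these hunks: the series drops `ChallengeGenerator` and squeezes each opening challenge directly from the caller's sponge. The following is a minimal sketch of that pattern, assuming the `CryptographicSponge` trait from `ark-crypto-primitives` and this crate's 128-bit `CHALLENGE_SIZE`; the helper name is illustrative and not part of the patch.

use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
use ark_ff::PrimeField;

// Each opening challenge is a fresh truncated squeeze from the shared sponge,
// so prover and verifier stay in sync by replaying the same absorptions;
// no separate generator state is threaded through `open`/`check` any more.
fn next_opening_challenge<F: PrimeField, S: CryptographicSponge>(sponge: &mut S) -> F {
    sponge.squeeze_field_elements_with_sizes::<F>(&[FieldElementSize::Truncated(128)])[0]
}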
@@ -251,13 +250,13 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut p = P::zero(); @@ -267,7 +266,7 @@ where let mut shifted_r_witness = P::zero(); let mut enforce_degree_bound = false; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, rand) in labeled_polynomials.into_iter().zip(states) { let degree_bound = polynomial.degree_bound(); assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -283,7 +282,7 @@ where )?; // compute next challenges challenge^j and challenge^{j+1}. - let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -299,7 +298,7 @@ where *point, &shifted_rand, )?; - let challenge_j_1 = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j_1 = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_witness = shift_polynomial(ck, &witness, degree_bound); @@ -347,7 +346,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -358,7 +357,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, Some(vk), )?; let combined_comm = kzg10::Commitment(combined_comm.into()); @@ -373,7 +372,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -384,7 +383,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, Some(vk), )?; assert_eq!(proof.len(), combined_queries.len()); @@ -407,13 +406,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -422,8 +421,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -437,7 +436,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -450,7 +449,7 @@ where query_set, evaluations, proof, - opening_challenges, + sponge, rng, ) } @@ -462,19 +461,19 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result>, Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let rng = &mut crate::optional_rng::OptionalRng(rng); let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) .map(|((poly, r), 
comm)| (poly.label(), (poly, r, comm))) .collect(); @@ -497,7 +496,7 @@ where let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); for label in labels { @@ -507,7 +506,7 @@ where })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(rand); query_comms.push(comm); } @@ -517,8 +516,8 @@ where query_polys, query_comms, point, - opening_challenges, - query_rands, + sponge, + query_states, Some(rng), )?; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs index 8ccf300b..9cc8d73b 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{BTreeMap, Vec}; use crate::{ - PCCommitterKey, PCPreparedVerifierKey, PCRandomness, PCUniversalParams, PCVerifierKey, + PCCommitmentState, PCCommitterKey, PCPreparedVerifierKey, PCUniversalParams, PCVerifierKey, }; use ark_ec::pairing::Pairing; use ark_poly::DenseMVPolynomial; @@ -362,12 +362,13 @@ where } } -impl PCRandomness for Randomness +impl PCCommitmentState for Randomness where E: Pairing, P: DenseMVPolynomial, P::Point: Index, { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index ac47c2a7..eee026d7 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -5,10 +5,14 @@ use crate::{ }; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use crate::{ToString, Vec}; use ark_ec::AffineRepr; -use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM}; +use ark_ec::{ + pairing::Pairing, + scalar_mul::{BatchMulPreprocessing, ScalarMul}, + CurveGroup, VariableBaseMSM, +}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; @@ -20,7 +24,6 @@ pub use data_structures::*; mod combinations; use combinations::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -151,7 +154,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = marlin_pc::Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -211,47 +214,33 @@ where }) .unzip(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let mut powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); - powers_of_g.push(g); + let mut powers_of_g = g.batch_mul(&powers_of_beta); + powers_of_g.push(g.into_affine()); 
powers_of_beta_terms.push(P::Term::new(vec![])); end_timer!(g_time); let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 2); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); // Each element `i` of `powers_of_gamma_g` is a vector of length `max_degree+1` // containing `betas[i]^j \gamma G` for `j` from 1 to `max_degree+1` to support // up to `max_degree` queries let mut powers_of_gamma_g = vec![Vec::new(); num_vars]; + let gamma_g_table = BatchMulPreprocessing::new(gamma_g, max_degree + 1); + ark_std::cfg_iter_mut!(powers_of_gamma_g) .enumerate() .for_each(|(i, v)| { - let mut powers_of_beta = Vec::with_capacity(max_degree); + let mut powers_of_beta = Vec::with_capacity(max_degree + 1); let mut cur = E::ScalarField::one(); for _ in 0..=max_degree { cur *= &betas[i]; powers_of_beta.push(cur); } - *v = FixedBase::msm::( - scalar_bits, - window_size, - &gamma_g_table, - &powers_of_beta, - ); + *v = gamma_g_table.batch_mul(&powers_of_beta); }); end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); let gamma_g = gamma_g.into_affine(); - let powers_of_gamma_g = powers_of_gamma_g - .into_iter() - .map(|v| E::G1::normalize_batch(&v)) - .collect(); let beta_h: Vec<_> = betas.iter().map(|b| h.mul(b).into_affine()).collect(); let h = h.into_affine(); let prepared_h = h.into(); @@ -343,7 +332,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -440,26 +429,26 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // Compute random linear combinations of committed polynomials and randomness let mut p = P::zero(); let mut r = Randomness::empty(); - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { Self::check_degrees_and_bounds(ck.supported_degree, &polynomial)?; // compute challenge^j and challenge^{j+1}. 
- let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; p += (challenge_j, polynomial.polynomial()); - r += (challenge_j, rand); + r += (challenge_j, state); } let open_time = start_timer!(|| format!("Opening polynomial of degree {}", p.degree())); @@ -538,7 +527,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -550,7 +539,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, None, )?; // Compute both sides of the pairing equation @@ -582,7 +571,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -593,7 +582,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, None, )?; let check_time = @@ -660,13 +649,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -675,8 +664,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -690,7 +679,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -703,7 +692,7 @@ where eqn_query_set, eqn_evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index 4bd4fe27..d7e7f5a1 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ -1,9 +1,9 @@ -use crate::{challenge::ChallengeGenerator, CHALLENGE_SIZE}; +use crate::CHALLENGE_SIZE; use crate::{kzg10, Error}; use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; use crate::{Evaluations, LabeledCommitment, QuerySet}; -use crate::{PCRandomness, Polynomial, PolynomialCommitment}; +use crate::{PCCommitmentState, Polynomial, PolynomialCommitment}; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; @@ -110,7 +110,7 @@ where fn accumulate_commitments_and_values<'a>( commitments: impl IntoIterator>>, values: impl IntoIterator, - challenge_gen: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(E::G1, E::ScalarField), Error> { let acc_time = start_timer!(|| "Accumulating commitments and values"); @@ -121,13 +121,14 @@ where let commitment = labeled_commitment.commitment(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); - let challenge_i = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; combined_comm += &commitment.comm.0.mul(challenge_i); combined_value += &(value * &challenge_i); if let Some(degree_bound) = degree_bound { - let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i_1: E::ScalarField 
= + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group(); @@ -152,7 +153,7 @@ where commitments: impl IntoIterator>>, query_set: &QuerySet, evaluations: &Evaluations, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(Vec>, Vec, Vec), Error> where @@ -199,7 +200,7 @@ where let (c, v) = Self::accumulate_commitments_and_values( comms_to_combine, values_to_combine, - opening_challenges, + sponge, vk, )?; end_timer!(lc_time); @@ -227,8 +228,8 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Error> where @@ -241,18 +242,18 @@ where Commitment = marlin_pc::Commitment, Error = Error, >, - PC::Randomness: 'a + AddAssign<(E::ScalarField, &'a PC::Randomness)>, + PC::CommitmentState: 'a + AddAssign<(E::ScalarField, &'a PC::CommitmentState)>, PC::Commitment: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) .map(|((p, r), c)| (p.label(), (p, r, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states: Vec = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -262,13 +263,13 @@ where let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = PC::Randomness::empty(); + let mut randomness = PC::CommitmentState::empty(); let mut coeffs_and_comms = Vec::new(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, cur_comm) = + let &(cur_poly, cur_state, cur_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -284,14 +285,14 @@ where // Some(_) > None, always. 
hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + randomness += (*coeff, cur_state); coeffs_and_comms.push((*coeff, cur_comm.commitment())); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(randomness); lc_commitments.push(Self::combine_commitments(coeffs_and_comms)); lc_info.push((lc_label, degree_bound)); } @@ -308,8 +309,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; @@ -323,7 +324,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -404,7 +405,7 @@ where &query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index eff86ab9..0973e822 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -1,9 +1,10 @@ use crate::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; +use ark_ec::scalar_mul::BatchMulPreprocessing; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{Field, PrimeField}; use ark_ff::{One, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -27,14 +28,11 @@ impl MultilinearPC { /// setup pub fn setup(num_vars: usize, rng: &mut R) -> UniversalParams { assert!(num_vars > 0, "constant polynomial not supported"); - let g: E::G1 = E::G1::rand(rng); - let h: E::G2 = E::G2::rand(rng); - let g = g.into_affine(); - let h = h.into_affine(); + let g = E::G1::rand(rng); + let h = E::G2::rand(rng); let mut powers_of_g = Vec::new(); let mut powers_of_h = Vec::new(); let t: Vec<_> = (0..num_vars).map(|_| E::ScalarField::rand(rng)).collect(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let mut eq: LinkedList> = LinkedList::from_iter(eq_extension(&t).into_iter()); @@ -54,29 +52,15 @@ impl MultilinearPC { } let mut pp_powers = Vec::new(); - let mut total_scalars = 0; for i in 0..num_vars { let eq = eq_arr.pop_front().unwrap(); let pp_k_powers = (0..(1 << (num_vars - i))).map(|x| eq[x]); pp_powers.extend(pp_k_powers); - total_scalars += 1 << (num_vars - i); } - let window_size = FixedBase::get_mul_window_size(total_scalars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_group()); - - let pp_g = E::G1::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &g_table, - &pp_powers, - )); - let pp_h = E::G2::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &h_table, - &pp_powers, - )); + + let g_table = BatchMulPreprocessing::new(g, num_vars); + let pp_g = g_table.batch_mul(&pp_powers); + let pp_h = h.batch_mul(&pp_powers); let mut start = 0; for i in 0..num_vars { let size = 1 << (num_vars - i); @@ -89,18 +73,14 @@ impl MultilinearPC { // uncomment to measure the time for calculating vp // let vp_generation_timer = start_timer!(|| "VP generation"); - let g_mask = { - let window_size = 
FixedBase::get_mul_window_size(num_vars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - E::G1::normalize_batch(&FixedBase::msm(scalar_bits, window_size, &g_table, &t)) - }; + let g_mask = g_table.batch_mul(&t); // end_timer!(vp_generation_timer); UniversalParams { num_vars, - g, + g: g.into_affine(), g_mask, - h, + h: h.into_affine(), powers_of_g, powers_of_h, } @@ -199,11 +179,7 @@ impl MultilinearPC { ) -> bool { let left = E::pairing(commitment.g_product.into_group() - &vk.g.mul(value), vk.h); - let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize; - let window_size = FixedBase::get_mul_window_size(vk.nv); - - let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_group()); - let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); + let g_mul = vk.g.into_group().batch_mul(point); let pairing_lefts: Vec<_> = (0..vk.nv) .map(|i| vk.g_mask_random[i].into_group() - &g_mul[i]) diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs index b989b323..caf9b79c 100644 --- a/poly-commit/src/sonic_pc/mod.rs +++ b/poly-commit/src/sonic_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, String, ToString, Vec}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -47,12 +46,12 @@ where point: P::Point, values: impl IntoIterator, proof: &kzg10::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, randomizer: Option, ) { let acc_time = start_timer!(|| "Accumulating elements"); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; // Keeps track of running combination of values let mut combined_values = E::ScalarField::zero(); @@ -73,7 +72,7 @@ where // Accumulate values in the BTreeMap *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge; - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } // Push expected results into list of elems. 
Power will be the negative of the expected power @@ -146,7 +145,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -281,7 +280,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -291,7 +290,7 @@ where let rng = &mut crate::optional_rng::OptionalRng(rng); let commit_time = start_timer!(|| "Committing to polynomials"); let mut labeled_comms: Vec> = Vec::new(); - let mut randomness: Vec = Vec::new(); + let mut randomness: Vec = Vec::new(); for labeled_polynomial in polynomials { let enforced_degree_bounds: Option<&[usize]> = ck @@ -345,21 +344,21 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let mut combined_polynomial = P::zero(); let mut combined_rand = kzg10::Randomness::empty(); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { let enforced_degree_bounds: Option<&[usize]> = ck .enforced_degree_bounds .as_ref() @@ -373,8 +372,8 @@ where )?; combined_polynomial += (curr_challenge, polynomial.polynomial()); - combined_rand += (curr_challenge, rand); - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + combined_rand += (curr_challenge, state); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let proof_time = start_timer!(|| "Creating proof for polynomials"); @@ -390,7 +389,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -410,7 +409,7 @@ where *point, values, proof, - opening_challenges, + sponge, None, ); @@ -430,7 +429,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -481,7 +480,7 @@ where *point, values_to_combine.into_iter(), p, - opening_challenges, + sponge, Some(randomizer), ); @@ -502,24 +501,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -528,13 +527,13 @@ where let mut poly = P::zero(); let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = 
Self::Randomness::empty(); + let mut state = Self::CommitmentState::empty(); let mut comm = E::G1::zero(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, curr_comm) = + let &(cur_poly, cur_state, curr_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -553,14 +552,14 @@ where // Some(_) > None, always. hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + state += (*coeff, cur_state); comm += &curr_comm.commitment().0.mul(*coeff); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(state); lc_commitments.push(comm); lc_info.push((lc_label, degree_bound)); } @@ -581,8 +580,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -597,7 +596,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -666,7 +665,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/streaming_kzg/data_structures.rs b/poly-commit/src/streaming_kzg/data_structures.rs index 7adaf005..c8b19c83 100644 --- a/poly-commit/src/streaming_kzg/data_structures.rs +++ b/poly-commit/src/streaming_kzg/data_structures.rs @@ -141,7 +141,7 @@ where /// Stream implementation of folded polynomial. #[derive(Clone, Copy)] -pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>, usize); +pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>); /// Iterator implementation of folded polynomial. pub struct FoldedPolynomialStreamIter<'a, F, I> { challenges: &'a [F], @@ -158,8 +158,7 @@ where /// Initialize a new folded polynomial stream. pub fn new(coefficients: &'a S, challenges: &'a [F]) -> Self { let tree = FoldedPolynomialTree::new(coefficients, challenges); - let len = challenges.len(); - Self(tree, len) + Self(tree) } } @@ -241,7 +240,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(), two]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 2); + let fold_stream = FoldedPolynomialStream(foldstream); assert_eq!(fold_stream.len(), 1); assert_eq!( fold_stream.iter().next(), @@ -253,7 +252,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(); 4]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 4).iter(); + let fold_stream = FoldedPolynomialStream(foldstream).iter(); assert_eq!(fold_stream.last(), Some(coefficients.iter().sum())); } diff --git a/poly-commit/src/streaming_kzg/time.rs b/poly-commit/src/streaming_kzg/time.rs index 8c7fa2f8..b8d52093 100644 --- a/poly-commit/src/streaming_kzg/time.rs +++ b/poly-commit/src/streaming_kzg/time.rs @@ -1,9 +1,9 @@ //! An implementation of a time-efficient version of Kate et al's polynomial commitment, //!
with optimization from [\[BDFG20\]](https://eprint.iacr.org/2020/081.pdf). use ark_ec::pairing::Pairing; -use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::scalar_mul::ScalarMul; use ark_ec::CurveGroup; -use ark_ff::{PrimeField, Zero}; +use ark_ff::Zero; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, vec::Vec, UniformRand}; @@ -50,11 +50,7 @@ impl CommitterKey { let powers_of_tau = powers(tau, max_degree + 1); let g = E::G1::rand(rng); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g_proj = FixedBase::msm(scalar_bits, window_size, &g_table, &powers_of_tau); - let powers_of_g = E::G1::normalize_batch(&powers_of_g_proj); + let powers_of_g = g.batch_mul(&powers_of_tau); let g2 = E::G2::rand(rng).into_affine(); let powers_of_g2 = powers_of_tau diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index 7c4a0575..f06ebf96 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -1,4 +1,6 @@ -use core::marker::PhantomData; +use ark_ff::Field; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::vec::Vec; #[cfg(feature = "parallel")] use rayon::{ prelude::IndexedParallelIterator, }; -use ark_ff::{Field, PrimeField}; -use ark_serialize::CanonicalSerialize; -use ark_std::vec::Vec; -use merlin::Transcript; - -use crate::Error; - /// Takes as input a struct, and converts it to a series of bytes. All traits /// that implement `CanonicalSerialize` can be automatically converted to bytes /// in this manner. @@ -31,7 +26,8 @@ pub(crate) fn ceil_div(x: usize, y: usize) -> usize { (x + y - 1) / y } -#[derive(Debug)] +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub(crate) struct Matrix { pub(crate) n: usize, pub(crate) m: usize, @@ -108,69 +104,11 @@ pub(crate) fn vector_sum(v1: &[F], v2: &[F]) -> Vec { .collect() } -/// The following struct is taken from jellyfish repository. Once they change -/// their dependency on `crypto-primitive`, we use their crate instead of -/// a copy-paste. We needed the newer `crypto-primitive` for serializing. -#[derive(Clone)] -pub(crate) struct IOPTranscript { - transcript: Transcript, - is_empty: bool, - #[doc(hidden)] - phantom: PhantomData, -} - -// TODO: merge this with jf_plonk::transcript -impl IOPTranscript { - /// Create a new IOP transcript. - pub(crate) fn new(label: &'static [u8]) -> Self { - Self { - transcript: Transcript::new(label), - is_empty: true, - phantom: PhantomData, - } - } - - /// Append the message to the transcript. - pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { - self.transcript.append_message(label, msg); - self.is_empty = false; - Ok(()) - } - - /// Append the message to the transcript. - pub(crate) fn append_serializable_element( - &mut self, - label: &'static [u8], - group_elem: &S, - ) -> Result<(), Error> { - self.append_message( - label, - &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, - ) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// The output field element is statistically uniform as long - /// as the field has a size less than 2^384.
- pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - let mut buf = [0u8; 64]; - self.transcript.challenge_bytes(label, &mut buf); - let challenge = F::from_le_bytes_mod_order(&buf); - self.append_serializable_element(label, &challenge)?; - Ok(challenge) - } -} - // TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. #[cfg(test)] use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +#[cfg(test)] +use ark_ff::PrimeField; #[cfg(test)] pub(crate) fn test_sponge() -> PoseidonSponge { From 694fa47e8932b9e508e2678b381d689c8058e8bc Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Thu, 18 Jan 2024 14:08:01 +0100 Subject: [PATCH 45/75] Delete `merlin` from dependencies --- poly-commit/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 16bfd95a..d7257277 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -23,7 +23,6 @@ hashbrown = { version = "0.14", default-features = false, optional = true } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } rayon = { version = "1", optional = true } -merlin = { version = "3.0.0", default-features = false } [[bench]] name = "ipa_times" From 044d74a85791919c1e49f7e812de046bf2555507 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Thu, 18 Jan 2024 14:19:52 +0100 Subject: [PATCH 46/75] Delete `IOPTranscript`, update with master (#51) (aka Ligero++) * Add the trait bounds * Add `CommitmentState` * Update benches for the new type * Fix the name of local variable * Merge `PCCommitmentState` with `PCRandomness` * Update `README.md` * Fix a bug * Simplify `hash_column` * Delete comments * Add `CommitmentState` * Make `fmt` happy * Refactor, remove `hash_columns` * Rename all params * Maybe `empty` not return `Self` * Make `empty` return `Self` * Rename `rand` to `state` * Add type `Randomness` * Ligero+++ (#46) * conversion to `into_iter` is a no-op * remove explicit casts to vecs * rename to use singular of `labeled_commitment` * simplify the iterators even further by zipping two iters * Apply suggestions from code review * Fix tests: sponge config for univariate ligero * Rename nonnative to emulated, as in `r1cs-std` (#137) * Rename nonnative to emulated, as in `r1cs-std` * Run `fmt` * Temporarily change `Cargo.toml` * Revert `Cargo.toml` * Refactor `FoldedPolynomialStream` partially * Substitute `ChallengeGenerator` by the generic sponge (#139) * Rename nonnative to emulated, as in `r1cs-std` * Run `fmt` * Temporarily change `Cargo.toml` * Substitute `ChallengeGenerator` with the generic sponge * Run `fmt` * Remove the extra file * Update modules * Delete the unnecessary loop * Revert `Cargo.toml` * Refactor `FoldedPolynomialStream` partially * Update README * Make the diff more readable * Bring the whitespace back * Make diff more readable, 2 * Fix according to breaking changes in `ark-ec` (#141) * Fix for KZG10 * Fix the breaking changes in `ark-ec` * Remove the extra loop * Fix the loop range * re-use the preprocessing table * also re-use the preprocessing table for multilinear_pc --------- Co-authored-by: mmagician * Auxiliary opening data (#134) * Add the trait bounds * Add `CommitmentState` * Update benches for the new type * Fix the name of local variable * Merge `PCCommitmentState` with `PCRandomness` * Update `README.md` * Fix a bug * Put `Randomness` in 
`CommitmentState` * Add a comment * Remove the extra loop * Update the comment for `CommitmentState` Co-authored-by: Marcin * cargo fmt --------- Co-authored-by: Marcin * `batch_mul_with_preprocessing` no longer takes `self` as argument (#142) * batch_mul_with_preprocessing no longer takes `self` as argument * Apply suggestions from code review Co-authored-by: Pratyush Mishra * fix variable name --------- Co-authored-by: Pratyush Mishra * Remove `ChallengeGenerator` and `IOPTranscript` for Ligero (#57) * Squash and merge `delete-chalgen` onto here * Fix Ligero for `ChallengeGenerator` and `AsRef` for Merkle tree * Fix tests: sponge config for univariate ligero * Delete `IOPTranscript` for Ligero (#54) * Replace the `IOPTranscript` with `CryptographicSponge` * Delete extra comments * Run fmt * Fix tests: sponge config for univariate ligero * Delete TODOs and do not absorb what you just squeezed * Fix unused import * Revert "Fix unused import" This reverts commit e85af9086180b486b71fc41add00be88ffaf147f. * Try to fix * Remove the extra loop --------- Co-authored-by: mmagician Co-authored-by: Pratyush Mishra --- README.md | 16 +- bench-templates/src/lib.rs | 25 +- poly-commit/src/challenge.rs | 61 -- poly-commit/src/constraints.rs | 20 +- poly-commit/src/data_structures.rs | 12 +- poly-commit/src/ipa_pc/data_structures.rs | 3 +- poly-commit/src/ipa_pc/mod.rs | 89 +-- poly-commit/src/kzg10/data_structures.rs | 3 +- poly-commit/src/kzg10/mod.rs | 49 +- poly-commit/src/lib.rs | 723 +++++++++--------- .../src/linear_codes/data_structures.rs | 27 +- poly-commit/src/linear_codes/ligero.rs | 18 +- poly-commit/src/linear_codes/mod.rs | 288 +++---- .../linear_codes/multilinear_ligero/mod.rs | 8 +- .../linear_codes/multilinear_ligero/tests.rs | 28 +- .../src/linear_codes/univariate_ligero/mod.rs | 8 +- .../linear_codes/univariate_ligero/tests.rs | 27 +- poly-commit/src/linear_codes/utils.rs | 31 +- .../src/marlin/marlin_pc/data_structures.rs | 7 +- poly-commit/src/marlin/marlin_pc/mod.rs | 65 +- .../marlin/marlin_pst13_pc/data_structures.rs | 5 +- poly-commit/src/marlin/marlin_pst13_pc/mod.rs | 73 +- poly-commit/src/marlin/mod.rs | 41 +- poly-commit/src/multilinear_pc/mod.rs | 48 +- poly-commit/src/sonic_pc/mod.rs | 65 +- .../src/streaming_kzg/data_structures.rs | 9 +- poly-commit/src/streaming_kzg/time.rs | 10 +- poly-commit/src/utils.rs | 96 +-- 28 files changed, 776 insertions(+), 1079 deletions(-) delete mode 100644 poly-commit/src/challenge.rs diff --git a/README.md b/README.md index 518d63bd..e9c1e50e 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ This trait defines the interface for a polynomial commitment scheme. It is recom // In this example, we will commit to a single polynomial, open it first at one point, and then batched at two points, and finally verify the proofs. // We will use the KZG10 polynomial commitment scheme, following the approach from Marlin. -use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations, challenge::ChallengeGenerator}; +use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations}; use ark_bls12_377::Bls12_377; use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; use ark_crypto_primitives::sponge::CryptographicSponge; @@ -128,17 +128,15 @@ let (ck, vk) = PCS::trim(&pp, degree, 2, Some(&[degree])).unwrap(); // 3. 
PolynomialCommitment::commit // The prover commits to the polynomial using their committer key `ck`. -let (comms, rands) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); - -let challenge_generator: ChallengeGenerator<::ScalarField, Sponge_Bls12_377> = ChallengeGenerator::new_univariate(&mut test_sponge); +let (comms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); // 4a. PolynomialCommitment::open // Opening proof at a single point. -let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (challenge_generator.clone()), &rands, None).unwrap(); +let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (test_sponge.clone()), &states, None).unwrap(); // 5a. PolynomialCommitment::check // Verifying the proof at a single point, given the commitment, the point, the claimed evaluation, and the proof. -assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (challenge_generator.clone()), Some(rng)).unwrap()); +assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (test_sponge.clone()), Some(rng)).unwrap()); let mut query_set = QuerySet::new(); let mut values = Evaluations::new(); @@ -155,8 +153,8 @@ let proof_batched = PCS::batch_open( [&labeled_poly], &comms, &query_set, - &mut (challenge_generator.clone()), - &rands, + &mut (test_sponge.clone()), + &states, Some(rng), ).unwrap(); @@ -167,7 +165,7 @@ assert!(PCS::batch_check( &query_set, &values, &proof_batched, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), rng, ).unwrap()); ``` diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 9500eb9c..1bc1fdbf 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -17,9 +17,7 @@ use rand_chacha::{ use core::time::Duration; use std::{borrow::Borrow, marker::PhantomData, time::Instant}; -use ark_poly_commit::{ - challenge::ChallengeGenerator, to_bytes, LabeledPolynomial, PolynomialCommitment, -}; +use ark_poly_commit::{to_bytes, LabeledPolynomial, PolynomialCommitment}; pub use criterion::*; pub use paste::paste; @@ -131,7 +129,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); let start = Instant::now(); @@ -140,8 +138,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -165,7 +163,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = P::Point::rand(rng); let proofs = PCS::open( @@ -173,8 +171,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -202,16 +200,17 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); + let 
claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( &ck, [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -223,7 +222,7 @@ where &point, [claimed_eval], &proof, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), None, ) .unwrap(); diff --git a/poly-commit/src/challenge.rs b/poly-commit/src/challenge.rs deleted file mode 100644 index 23b3c9d1..00000000 --- a/poly-commit/src/challenge.rs +++ /dev/null @@ -1,61 +0,0 @@ -use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; -use ark_ff::PrimeField; - -/// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. -/// For multivariate strategy, each challenge is freshly squeezed from a sponge. -/// For univariate strategy, each challenge is a power of one squeezed element from sponge. -/// -/// Note that mutable reference cannot be cloned. -#[derive(Clone)] -pub enum ChallengeGenerator { - /// Each challenge is freshly squeezed from a sponge. - Multivariate(S), - /// Each challenge is a power of one squeezed element from sponge. - /// - /// `Univariate(generator, next_element)` - Univariate(F, F), -} - -impl ChallengeGenerator { - /// Returns a challenge generator with multivariate strategy. Each challenge is freshly squeezed - /// from a sponge. - pub fn new_multivariate(sponge: S) -> Self { - Self::Multivariate(sponge) - } - - /// Returns a challenge generator with univariate strategy. Each challenge is a power of one - /// squeezed element from sponge. - pub fn new_univariate(sponge: &mut S) -> Self { - let gen = sponge.squeeze_field_elements(1)[0]; - Self::Univariate(gen, gen) - } - - /// Returns a challenge of size `size`. - /// * If `self == Self::Multivariate(...)`, then this squeezes out a challenge of size `size`. - /// * If `self == Self::Univariate(...)`, then this ignores the `size` argument and simply squeezes out - /// the next field element. - pub fn try_next_challenge_of_size(&mut self, size: FieldElementSize) -> F { - match self { - // multivariate (full) - Self::Multivariate(sponge) => sponge.squeeze_field_elements_with_sizes(&[size])[0], - // univariate - Self::Univariate(gen, next) => { - let result = next.clone(); - *next *= *gen; - result - } - } - } - /// Returns the next challenge generated. - pub fn next_challenge(&mut self) -> F { - self.try_next_challenge_of_size(FieldElementSize::Full) - } - - /// Returns the sponge state if `self` is multivariate. Returns `None` otherwise. - pub fn into_sponge(self) -> Option { - match self { - Self::Multivariate(s) => Some(s), - _ => None, - } - } -} diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index e6fb5d4f..1300509a 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -5,7 +5,7 @@ use crate::{ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::Polynomial; -use ark_r1cs_std::fields::nonnative::NonNativeFieldVar; +use ark_r1cs_std::fields::emulated_fp::EmulatedFpVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; @@ -24,8 +24,8 @@ pub enum LinearCombinationCoeffVar), + /// Other coefficient, represented as an "emulated" field element.
+ Var(EmulatedFpVar), } /// An allocated version of `LinearCombination`. @@ -60,7 +60,7 @@ impl let (f, lc_term) = term; let fg = - NonNativeFieldVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) + EmulatedFpVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) .unwrap(); (LinearCombinationCoeffVar::Var(fg), lc_term.clone()) @@ -79,12 +79,12 @@ impl pub struct PCCheckRandomDataVar { /// Opening challenges. /// The prover and the verifier MUST use the same opening challenges. - pub opening_challenges: Vec>, + pub opening_challenges: Vec>, /// Bit representations of the opening challenges. pub opening_challenges_bits: Vec>>, /// Batching random numbers. /// The verifier can choose these numbers freely, as long as they are random. - pub batching_rands: Vec>, + pub batching_rands: Vec>, /// Bit representations of the batching random numbers. pub batching_rands_bits: Vec>>, } @@ -172,7 +172,7 @@ pub struct LabeledPointVar { /// MUST be a unique identifier in a query set. pub name: String, /// The point value. - pub value: NonNativeFieldVar, + pub value: EmulatedFpVar, } /// An allocated version of `QuerySet`. @@ -184,7 +184,7 @@ pub struct QuerySetVar( /// An allocated version of `Evaluations`. #[derive(Clone)] pub struct EvaluationsVar( - pub HashMap, NonNativeFieldVar>, + pub HashMap, EmulatedFpVar>, ); impl EvaluationsVar { @@ -192,8 +192,8 @@ impl EvaluationsVar, - ) -> Result, SynthesisError> { + point: &EmulatedFpVar, + ) -> Result, SynthesisError> { let key = LabeledPointVar:: { name: String::from(lc_string), value: point.clone(), diff --git a/poly-commit/src/data_structures.rs b/poly-commit/src/data_structures.rs index 4a5eec21..2b942ee1 100644 --- a/poly-commit/src/data_structures.rs +++ b/poly-commit/src/data_structures.rs @@ -70,9 +70,12 @@ pub trait PCPreparedCommitment: Clone { fn prepare(comm: &UNPREPARED) -> Self; } -/// Defines the minimal interface of commitment randomness for any polynomial -/// commitment scheme. -pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { +/// Defines the minimal interface of commitment state for any polynomial +/// commitment scheme. It may include, for example, the randomness used to hide the commitment. +pub trait PCCommitmentState: Clone + CanonicalSerialize + CanonicalDeserialize { + /// This is the type of `Randomness` that the `rand` method returns. + type Randomness: Clone + CanonicalSerialize + CanonicalDeserialize; + /// Outputs empty randomness that does not hide the commitment. fn empty() -> Self; @@ -86,9 +89,8 @@ pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { has_degree_bound: bool, num_vars: Option, rng: &mut R, - ) -> Self; + ) -> Self::Randomness; } - /// A proof of satisfaction of linear combinations.
#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] pub struct BatchLCProof { diff --git a/poly-commit/src/ipa_pc/data_structures.rs b/poly-commit/src/ipa_pc/data_structures.rs index 7ba56c95..84fcb7f2 100644 --- a/poly-commit/src/ipa_pc/data_structures.rs +++ b/poly-commit/src/ipa_pc/data_structures.rs @@ -146,7 +146,8 @@ pub struct Randomness { pub shifted_rand: Option, } -impl PCRandomness for Randomness { +impl PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: G::ScalarField::zero(), diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index 25752d78..43a40852 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -1,7 +1,7 @@ use crate::{BTreeMap, BTreeSet, String, ToString, Vec, CHALLENGE_SIZE}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCCommitterKey, PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCCommitterKey, PCUniversalParams, PolynomialCommitment}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; @@ -15,7 +15,6 @@ pub use data_structures::*; #[cfg(feature = "parallel")] use rayon::prelude::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; use digest::Digest; @@ -105,7 +104,7 @@ where point: G::ScalarField, values: impl IntoIterator, proof: &Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, ) -> Option> { let check_time = start_timer!(|| "Succinct checking"); @@ -117,7 +116,8 @@ where let mut combined_commitment_proj = G::Group::zero(); let mut combined_v = G::ScalarField::zero(); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge: G::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let labeled_commitments = commitments.into_iter(); let values = values.into_iter(); @@ -126,7 +126,7 @@ where let commitment = labeled_commitment.commitment(); combined_v += &(cur_challenge * &value); combined_commitment_proj += &labeled_commitment.commitment().comm.mul(cur_challenge); - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let degree_bound = labeled_commitment.degree_bound(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); @@ -137,7 +137,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let mut combined_commitment = combined_commitment_proj.into_affine(); @@ -347,7 +347,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -418,7 +418,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -427,7 +427,7 @@ where { let rng = &mut crate::optional_rng::OptionalRng(rng); let mut comms = Vec::new(); - let mut rands = Vec::new(); + let mut states = Vec::new(); let commit_time = start_timer!(|| "Committing to polynomials"); for labeled_polynomial in polynomials { @@ -446,7 +446,7 
@@ where hiding_bound, )); - let randomness = if let Some(h) = hiding_bound { + let state = if let Some(h) = hiding_bound { Randomness::rand(h, degree_bound.is_some(), None, rng) } else { Randomness::empty() @@ -456,7 +456,7 @@ where &ck.comm_key[..(polynomial.degree() + 1)], &polynomial.coeffs(), Some(ck.s), - Some(randomness.rand), + Some(state.rand), ) .into(); @@ -465,7 +465,7 @@ where &ck.comm_key[(ck.supported_degree() - d)..], &polynomial.coeffs(), Some(ck.s), - randomness.shifted_rand, + state.shifted_rand, ) .into() }); @@ -474,13 +474,13 @@ where let labeled_comm = LabeledCommitment::new(label.to_string(), commitment, degree_bound); comms.push(labeled_comm); - rands.push(randomness); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((comms, rands)) + Ok((comms, states)) } fn open<'a>( @@ -488,13 +488,13 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, P: 'a, { let mut combined_polynomial = P::zero(); @@ -504,15 +504,15 @@ where let mut has_hiding = false; let polys_iter = labeled_polynomials.into_iter(); - let rands_iter = rands.into_iter(); + let states_iter = states.into_iter(); let comms_iter = commitments.into_iter(); let combine_time = start_timer!(|| "Combining polynomials, randomness, and commitments."); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (labeled_polynomial, (labeled_commitment, randomness)) in - polys_iter.zip(comms_iter.zip(rands_iter)) + for (labeled_polynomial, (labeled_commitment, state)) in + polys_iter.zip(comms_iter.zip(states_iter)) { let label = labeled_polynomial.label(); assert_eq!(labeled_polynomial.label(), labeled_commitment.label()); @@ -528,10 +528,10 @@ where if hiding_bound.is_some() { has_hiding = true; - combined_rand += &(cur_challenge * &randomness.rand); + combined_rand += &(cur_challenge * &state.rand); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let has_degree_bound = degree_bound.is_some(); @@ -554,7 +554,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); if hiding_bound.is_some() { - let shifted_rand = randomness.shifted_rand; + let shifted_rand = state.shifted_rand; assert!( shifted_rand.is_some(), "shifted_rand.is_none() for {}", @@ -564,7 +564,7 @@ where } } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } end_timer!(combine_time); @@ -739,7 +739,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -762,8 +762,7 @@ where )); } - let check_poly = - Self::succinct_check(vk, commitments, *point, values, proof, opening_challenges); + let check_poly = Self::succinct_check(vk, commitments, *point, values, proof, sponge); if check_poly.is_none() { return Ok(false); @@ -790,7 +789,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - 
opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -833,14 +832,8 @@ where vals.push(*v_i); } - let check_poly = Self::succinct_check( - vk, - comms.into_iter(), - *point, - vals.into_iter(), - p, - opening_challenges, - ); + let check_poly = + Self::succinct_check(vk, comms.into_iter(), *point, vals.into_iter(), p, sponge); if check_poly.is_none() { return Ok(false); @@ -876,24 +869,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_poly_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -951,7 +944,7 @@ where let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(Randomness { + lc_states.push(Randomness { rand: combined_rand, shifted_rand: combined_shifted_rand, }); @@ -971,8 +964,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -987,7 +980,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -1060,7 +1053,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/kzg10/data_structures.rs b/poly-commit/src/kzg10/data_structures.rs index 60626e70..d648f19f 100644 --- a/poly-commit/src/kzg10/data_structures.rs +++ b/poly-commit/src/kzg10/data_structures.rs @@ -420,7 +420,8 @@ impl> Randomness { } } -impl> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/kzg10/mod.rs b/poly-commit/src/kzg10/mod.rs index a6ea5752..508db2cb 100644 --- a/poly-commit/src/kzg10/mod.rs +++ b/poly-commit/src/kzg10/mod.rs @@ -5,10 +5,10 @@ //! proposed by Kate, Zaverucha, and Goldberg ([KZG10](http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf)). //! This construction achieves extractability in the algebraic group model (AGM). 
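Throughout this patch, the former `ChallengeGenerator` is replaced by squeezing challenges directly from a `CryptographicSponge`. A minimal sketch of the recurring pattern (assuming `CHALLENGE_SIZE` is the truncated 128-bit `FieldElementSize` used elsewhere in this crate):

    use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
    use ark_ff::PrimeField;

    // Squeeze one short opening challenge; callers are expected to have
    // absorbed all public data (commitments, point, claimed values) first.
    fn next_challenge<F: PrimeField, S: CryptographicSponge>(sponge: &mut S) -> F {
        sponge.squeeze_field_elements_with_sizes::<F>(&[FieldElementSize::Truncated(128)])[0]
    }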
-use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec}; +use crate::{BTreeMap, Error, LabeledPolynomial, PCCommitmentState, ToString, Vec}; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; @@ -66,36 +66,27 @@ where let gamma_g = E::G1::rand(rng); let h = E::G2::rand(rng); + // powers_of_beta = [1, b, ..., b^(max_degree + 1)], len = max_degree + 2 let mut powers_of_beta = vec![E::ScalarField::one()]; - let mut cur = beta; - for _ in 0..max_degree { + for _ in 0..=max_degree { powers_of_beta.push(cur); cur *= β } - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); + let powers_of_g = g.batch_mul(&powers_of_beta[0..max_degree + 1]); end_timer!(g_time); - let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); - let mut powers_of_gamma_g = - FixedBase::msm::(scalar_bits, window_size, &gamma_g_table, &powers_of_beta); - // Add an additional power of gamma_g, because we want to be able to support - // up to D queries. - powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta)); - end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); - let powers_of_gamma_g = E::G1::normalize_batch(&powers_of_gamma_g) + // Use the entire `powers_of_beta`, since we want to be able to support + // up to D queries. + let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); + let powers_of_gamma_g = gamma_g + .batch_mul(&powers_of_beta) .into_iter() .enumerate() .collect(); + end_timer!(gamma_g_time); let neg_powers_of_h_time = start_timer!(|| "Generating negative powers of h in G2"); let neg_powers_of_h = if produce_g2_powers { @@ -106,20 +97,10 @@ where cur /= β } - let neg_h_table = FixedBase::get_window_table(scalar_bits, window_size, h); - let neg_powers_of_h = FixedBase::msm::( - scalar_bits, - window_size, - &neg_h_table, - &neg_powers_of_beta, - ); - - let affines = E::G2::normalize_batch(&neg_powers_of_h); - let mut affines_map = BTreeMap::new(); - affines.into_iter().enumerate().for_each(|(i, a)| { - affines_map.insert(i, a); - }); - affines_map + h.batch_mul(&neg_powers_of_beta) + .into_iter() + .enumerate() + .collect() } else { BTreeMap::new() }; diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index c8805bac..bcd625a7 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -9,7 +9,7 @@ #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] -#![doc = include_str!("../README.md")] +#![doc = include_str!("../../README.md")] #[allow(unused)] #[macro_use] @@ -101,8 +101,6 @@ pub mod sonic_pc; /// [pcdas]: https://eprint.iacr.org/2020/499 pub mod ipa_pc; -/// Defines the challenge strategies and challenge generator. 
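The KZG10 `setup` above now derives all SRS group elements with `ScalarMul::batch_mul` instead of hand-rolled fixed-base window tables. A condensed sketch of that path (assuming ark-ec 0.4's `batch_mul`, which multiplies one base by many scalars and batch-normalizes the results to affine):

    use ark_ec::{pairing::Pairing, scalar_mul::ScalarMul};
    use ark_ff::One;

    // Returns [g, beta * g, ..., beta^max_degree * g] in affine form.
    fn srs_g1<E: Pairing>(g: E::G1, beta: E::ScalarField, max_degree: usize) -> Vec<E::G1Affine> {
        let mut powers_of_beta = vec![E::ScalarField::one()];
        let mut cur = beta;
        for _ in 0..max_degree {
            powers_of_beta.push(cur);
            cur *= &beta;
        }
        // One call replaces the old window-table generation, fixed-base MSM,
        // and `normalize_batch` steps.
        g.batch_mul(&powers_of_beta)
    }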
-pub mod challenge; /// A multilinear polynomial commitment scheme that converts n-variate multilinear polynomial into /// n quotient UV polynomial. This scheme is based on hardness of the discrete logarithm /// in prime-order groups. Construction is detailed in [[XZZPD19]][xzzpd19] and [[ZGKPP18]][zgkpp18] @@ -111,7 +109,6 @@ pub mod challenge; /// [zgkpp]: https://ieeexplore.ieee.org/document/8418645 pub mod multilinear_pc; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// Multivariate polynomial commitment based on the construction in /// [[PST13]][pst] with batching and (optional) hiding property inspired @@ -164,8 +161,11 @@ pub trait PolynomialCommitment, S: Cryptographic type VerifierKey: PCVerifierKey; /// The commitment to a polynomial. type Commitment: PCCommitment + Default; - /// The commitment randomness. - type Randomness: PCRandomness; + /// Auxiliary state of the commitment, output by the `commit` phase. + /// It contains information that can be reused by the committer + /// during the `open` phase, such as the commitment randomness. + /// Not to be shared with the verifier. + type CommitmentState: PCCommitmentState; /// The evaluation proof for a single point. type Proof: Clone; /// The evaluation proof for a query set. @@ -211,7 +211,7 @@ pub trait PolynomialCommitment, S: Cryptographic ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -224,13 +224,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a; /// check but with individual challenges @@ -240,7 +240,7 @@ pub trait PolynomialCommitment, S: Cryptographic point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: Option<&mut dyn RngCore>, ) -> Result where @@ -260,13 +260,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // The default implementation achieves proceeds by rearranging the queries in @@ -274,16 +274,16 @@ pub trait PolynomialCommitment, S: Cryptographic // the same point, then opening their commitments simultaneously with a // single call to `open` (per point) let rng = &mut crate::optional_rng::OptionalRng(rng); - let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials + let poly_st_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) - .map(|((poly, r), comm)| (poly.label(), (poly, r, comm))) + .map(|((poly, st), comm)| (poly.label(), (poly, st, comm))) .collect(); let open_time = start_timer!(|| format!( "Opening {} polynomials at query set of size {}", - poly_rand_comm.len(), + poly_st_comm.len(), query_set.len(), )); @@ -306,20 +306,20 @@ pub trait PolynomialCommitment, S: Cryptographic let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { 
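        // For each distinct evaluation point, the default `batch_open` below
        // gathers every polynomial queried at that point (plus its commitment
        // and commitment state) and issues a single `open` call. For
        // reference, a sketch of how `query_to_labels_map` is typically built
        // from the query set: one entry per point label, accumulating the
        // labels of all polynomials queried there.
        //
        //     let mut query_to_labels_map = BTreeMap::new();
        //     for (poly_label, (point_label, point)) in query_set.iter() {
        //         let labels = query_to_labels_map
        //             .entry(point_label)
        //             .or_insert((point, BTreeSet::new()));
        //         labels.1.insert(poly_label);
        //     }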
let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); // Constructing matching vectors with the polynomial, commitment // randomness and actual commitment for each polynomial being // queried at `point` for label in labels { - let (polynomial, rand, comm) = - poly_rand_comm.get(label).ok_or(Error::MissingPolynomial { + let (polynomial, state, comm) = + poly_st_comm.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(state); query_comms.push(comm); } @@ -332,8 +332,8 @@ pub trait PolynomialCommitment, S: Cryptographic query_polys, query_comms, &point, - challenge_generator, - query_rands, + sponge, + query_states, Some(rng), )?; @@ -365,7 +365,7 @@ pub trait PolynomialCommitment, S: Cryptographic query_set: &QuerySet, evaluations: &Evaluations, proof: &Self::BatchProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -423,15 +423,7 @@ pub trait PolynomialCommitment, S: Cryptographic // Verify all proofs referring to the current point simultaneously // with a single call to `check` - result &= Self::check( - vk, - comms, - &point, - values, - &proof, - challenge_generator, - Some(rng), - )?; + result &= Self::check(vk, comms, &point, values, &proof, sponge, Some(rng))?; end_timer!(proof_time); } Ok(result) @@ -445,12 +437,12 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { @@ -471,8 +463,8 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials, commitments, &poly_query_set, - challenge_generator, - rands, + sponge, + states, rng, )?; Ok(BatchLCProof { @@ -490,7 +482,7 @@ pub trait PolynomialCommitment, S: Cryptographic eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -561,7 +553,7 @@ pub trait PolynomialCommitment, S: Cryptographic &poly_query_set, &poly_evals, proof, - challenge_generator, + sponge, rng, )?; if !pc_result { @@ -673,88 +665,83 @@ pub mod tests { PC: PolynomialCommitment, S: CryptographicSponge, { - let challenge_generators = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; - - for challenge_gen in challenge_generators { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - let max_degree = 100; - let pp = PC::setup(max_degree, None, rng)?; - for _ in 0..10 { - let supported_degree = Uniform::from(1..=max_degree).sample(rng); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - - let mut labels = Vec::new(); - let mut polynomials = Vec::new(); - let mut degree_bounds = Vec::new(); - - for i in 0..10 { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree_bound = 1usize; - let hiding_bound = Some(1); - degree_bounds.push(degree_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(supported_degree, None, rng), - Some(degree_bound), - hiding_bound, - )); - } + let sponge = sponge(); + + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let max_degree = 100; + let pp = PC::setup(max_degree, None, rng)?; + for _ in 0..10 { + let supported_degree = Uniform::from(1..=max_degree).sample(rng); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + + let mut labels = Vec::new(); + let mut polynomials = Vec::new(); + let mut degree_bounds = Vec::new(); + + for i in 0..10 { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree_bound = 1usize; + let hiding_bound = Some(1); + degree_bounds.push(degree_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(supported_degree, None, rng), + Some(degree_bound), + hiding_bound, + )); + } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - Some(degree_bounds.as_slice()), - )?; - println!("Trimmed"); + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); + println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + Some(degree_bounds.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - let point = rand_point(None, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + let point = rand_point(None, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } Ok(()) @@ -781,127 +768,123 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make 
the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials: Vec> = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials: Vec> = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; - polynomials.push(LabeledPolynomial::new( - label, - rand_poly(degree, num_vars, rng).into(), - degree_bound, - hiding_bound, - )) - } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng).into(), + degree_bound, + hiding_bound, + )) + } + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); 
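A note on the test flow below: the prover- and verifier-side calls are each handed a clone of the same freshly created sponge, so both sides replay Fiat-Shamir from an identical initial state. A minimal sketch of that round trip, with names as in these tests:

    let proof = PC::batch_open(
        &ck, &polynomials, &comms, &query_set,
        &mut (sponge.clone()), &rands, Some(rng),
    )?;
    assert!(PC::batch_check(
        &vk, &comms, &query_set, &values, &proof,
        &mut (sponge.clone()), rng,
    )?);

Cloning rather than sharing one sponge keeps the two transcripts independent of call order while still perfectly synchronized.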
+ println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - // Construct query set - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for _ in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } + // Construct query set + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for _ in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); - } + } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + num_polynomials, num_points_in_query_set + ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); } - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + Ok(()) } @@ -926,167 +909,163 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + 
supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + if rng.gen() { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) + } else { + None + } } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - if rng.gen() { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; + println!("Hiding bound: {:?}", hiding_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng), + degree_bound, + hiding_bound, + )) + } + println!("supported degree: {:?}", supported_degree); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + println!("{:?}", degree_bounds); + println!("{}", num_polynomials); + println!("{}", enforce_degree_bounds); + + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_degree, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); + + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + + // Let's construct our equations + let mut linear_combinations = Vec::new(); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for i in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for j in 0..num_equations.unwrap() { + let label = format!("query {} eqn {}", i, j); + let mut lc = LinearCombination::empty(label.clone()); + + let mut value = F::zero(); + let should_have_degree_bounds: bool = rng.gen(); + for (k, label) in labels.iter().enumerate() { + if should_have_degree_bounds { + value += &polynomials[k].evaluate(&point); + lc.push((F::one(), label.to_string().into())); + break; } else { - None - } - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; - println!("Hiding bound: {:?}", hiding_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(degree, num_vars, rng), - degree_bound, - hiding_bound, - )) - } - println!("supported degree: {:?}", supported_degree); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - println!("{:?}", degree_bounds); - println!("{}", num_polynomials); - println!("{}", enforce_degree_bounds); - - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_degree, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); - - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - - // Let's construct our equations - let mut linear_combinations = Vec::new(); - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for i in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for j in 0..num_equations.unwrap() { - let label = format!("query {} eqn {}", i, j); - let mut lc = LinearCombination::empty(label.clone()); - - let mut value = F::zero(); - let should_have_degree_bounds: bool = rng.gen(); - for (k, label) in labels.iter().enumerate() { - if should_have_degree_bounds { - value += &polynomials[k].evaluate(&point); - lc.push((F::one(), label.to_string().into())); - break; + let poly = &polynomials[k]; + if poly.degree_bound().is_some() { + continue; } else { - let poly = &polynomials[k]; - if poly.degree_bound().is_some() { - continue; - } else { - assert!(poly.degree_bound().is_none()); - let coeff = F::rand(rng); - value += &(coeff * poly.evaluate(&point)); - lc.push((coeff, label.to_string().into())); - } + assert!(poly.degree_bound().is_none()); + let coeff = F::rand(rng); + value += &(coeff * poly.evaluate(&point)); + lc.push((coeff, label.to_string().into())); } } - values.insert((label.clone(), point.clone()), value); - if !lc.is_empty() { - linear_combinations.push(lc); - // Insert query - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - } } - } - if linear_combinations.is_empty() { - continue; - } - println!("Generated query set"); - println!("Linear combinations: {:?}", linear_combinations); - - let proof = PC::open_combinations( - &ck, - &linear_combinations, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - println!("Generated proof"); - let result = PC::check_combinations( - &vk, - &linear_combinations, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); + values.insert((label.clone(), point.clone()), value); + if !lc.is_empty() { + linear_combinations.push(lc); + // Insert query + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); } } - assert!( - result, - "proof was incorrect, equations: {:#?}", - linear_combinations + } + if linear_combinations.is_empty() { + continue; + } + println!("Generated query set"); + println!("Linear combinations: {:?}", linear_combinations); + + let proof = PC::open_combinations( + &ck, + &linear_combinations, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + println!("Generated proof"); + let result = PC::check_combinations( + &vk, + &linear_combinations, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + 
num_polynomials, num_points_in_query_set ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); + } } + assert!( + result, + "proof was incorrect, equations: {:#?}", + linear_combinations + ); } + Ok(()) } diff --git a/poly-commit/src/linear_codes/data_structures.rs b/poly-commit/src/linear_codes/data_structures.rs index 8a6f91dd..960e62cf 100644 --- a/poly-commit/src/linear_codes/data_structures.rs +++ b/poly-commit/src/linear_codes/data_structures.rs @@ -1,4 +1,4 @@ -use crate::{PCCommitment, PCRandomness}; +use crate::{utils::Matrix, PCCommitment, PCCommitmentState}; use ark_crypto_primitives::{ crh::CRHScheme, merkle_tree::{Config, LeafParam, Path, TwoToOneParam}, @@ -22,10 +22,10 @@ pub struct LigeroPCParams { pub(crate) check_well_formedness: bool, /// Parameters for hash function of Merkle tree leaves #[derivative(Debug = "ignore")] - pub(crate) leaf_hash_params: LeafParam, + pub(crate) leaf_hash_param: LeafParam, /// Parameters for hash function of Merke tree combining two nodes into one #[derivative(Debug = "ignore")] - pub(crate) two_to_one_params: TwoToOneParam, + pub(crate) two_to_one_hash_param: TwoToOneParam, // Parameters for obtaining leaf digest from leaf value. #[derivative(Debug = "ignore")] pub(crate) col_hash_params: H::Parameters, @@ -59,9 +59,24 @@ impl PCCommitment for LinCodePCCommitment { } } -pub(crate) type LinCodePCRandomness = (); +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCCommitmentState +where + F: PrimeField, + H: CRHScheme, +{ + pub(crate) mat: Matrix, + pub(crate) ext_mat: Matrix, + pub(crate) leaves: Vec, +} -impl PCRandomness for LinCodePCRandomness { +impl PCCommitmentState for LinCodePCCommitmentState +where + F: PrimeField, + H: CRHScheme, +{ + type Randomness = (); fn empty() -> Self { unimplemented!() } @@ -71,7 +86,7 @@ impl PCRandomness for LinCodePCRandomness { _has_degree_bound: bool, _num_vars: Option, _rng: &mut R, - ) -> Self { + ) -> Self::Randomness { unimplemented!() } } diff --git a/poly-commit/src/linear_codes/ligero.rs b/poly-commit/src/linear_codes/ligero.rs index f60125b4..3eb13043 100644 --- a/poly-commit/src/linear_codes/ligero.rs +++ b/poly-commit/src/linear_codes/ligero.rs @@ -23,8 +23,8 @@ where sec_param: usize, rho_inv: usize, check_well_formedness: bool, - leaf_hash_params: LeafParam, - two_to_one_params: TwoToOneParam, + leaf_hash_param: LeafParam, + two_to_one_hash_param: TwoToOneParam, col_hash_params: H::Parameters, ) -> Self { Self { @@ -32,8 +32,8 @@ where sec_param, rho_inv, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, } } @@ -127,12 +127,14 @@ where (n, m) } - fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters { - &self.leaf_hash_params + fn leaf_hash_param(&self) -> &<::LeafHash as CRHScheme>::Parameters { + &self.leaf_hash_param } - fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { - &self.two_to_one_params + fn two_to_one_hash_param( + &self, + ) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { + &self.two_to_one_hash_param } fn col_hash_params(&self) -> &::Parameters { diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 8e21c83a..b31595c5 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -1,12 +1,15 @@ -use 
crate::utils::{inner_product, IOPTranscript, Matrix}; +use crate::utils::{inner_product, Matrix}; use crate::{ - Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey, - PolynomialCommitment, + to_bytes, Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, + PCVerifierKey, PolynomialCommitment, }; use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; use ark_crypto_primitives::merkle_tree::MerkleTree; -use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_crypto_primitives::{ + merkle_tree::Config, + sponge::{Absorb, CryptographicSponge}, +}; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_std::borrow::Borrow; @@ -32,7 +35,7 @@ use data_structures::*; pub use data_structures::{LigeroPCParams, LinCodePCProof}; -use utils::{calculate_t, get_indices_from_transcript, hash_column}; +use utils::{calculate_t, get_indices_from_sponge}; const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; @@ -57,10 +60,12 @@ where fn compute_dimensions(&self, n: usize) -> (usize, usize); /// Get the hash parameters for obtaining leaf digest from leaf value. - fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters; + fn leaf_hash_param(&self) -> &<::LeafHash as CRHScheme>::Parameters; /// Get the parameters for hashing nodes in the merkle tree. - fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; + fn two_to_one_hash_param( + &self, + ) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; /// Get the parameters for hashing a vector of values, /// representing a column of the coefficient matrix, into a leaf value. @@ -87,8 +92,8 @@ where max_degree: usize, num_vars: Option, rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams; @@ -151,14 +156,14 @@ where impl PolynomialCommitment for LinearCodePCS where L: LinearEncode, - F: PrimeField, + F: PrimeField + Absorb, P: Polynomial, S: CryptographicSponge, C: Config + 'static, Vec: Borrow<::Input>, - H::Output: Into, - C::Leaf: Sized + Clone + Default + Send, - H: CRHScheme, + H::Output: Into + Send, + C::Leaf: Sized + Clone + Default + Send + AsRef, + H: CRHScheme + 'static, { type UniversalParams = L::LinCodePCParams; @@ -168,7 +173,7 @@ where type Commitment = LinCodePCCommitment; - type Randomness = LinCodePCRandomness; + type CommitmentState = LinCodePCCommitmentState; type Proof = LPCPArray; @@ -184,8 +189,8 @@ where num_vars: Option, rng: &mut R, ) -> Result { - let leaf_hash_params = ::setup(rng).unwrap(); - let two_to_one_params = ::setup(rng) + let leaf_hash_param = ::setup(rng).unwrap(); + let two_to_one_hash_param = ::setup(rng) .unwrap() .clone(); let col_hash_params = ::setup(rng).unwrap(); @@ -193,8 +198,8 @@ where max_degree, num_vars, rng, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); let real_max_degree = ::max_degree(&pp); @@ -223,7 +228,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -231,35 +236,43 @@ where P: 'a, { let mut commitments = Vec::new(); + let mut states = Vec::new(); - for labeled_polynomial in polynomials.into_iter() { + for labeled_polynomial in polynomials { let polynomial = 
labeled_polynomial.polynomial(); // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + let n_rows = mat.n; + let n_cols = mat.m; + let n_ext_cols = ext_mat.m; // 2. Create the Merkle tree from the hashes of each column. - let col_tree = create_merkle_tree::( - &ext_mat, - ck.leaf_hash_params(), - ck.two_to_one_params(), - ck.col_hash_params(), + let ext_mat_cols = ext_mat.cols(); + let leaves: Vec = cfg_into_iter!(ext_mat_cols) + .map(|col| { + H::evaluate(ck.col_hash_params(), col) + .map_err(|_| Error::HashingError) + .unwrap() + }) + .collect(); + let state = Self::CommitmentState { + mat, + ext_mat, + leaves, + }; + let mut leaves: Vec = + state.leaves.clone().into_iter().map(|h| h.into()).collect(); // TODO cfg_inter + let col_tree = create_merkle_tree::( + &mut leaves, + ck.leaf_hash_param(), + ck.two_to_one_hash_param(), )?; - // 3. Obtain the MT root and add it to the transcript. + // 3. Obtain the MT root let root = col_tree.root(); - let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); - - transcript - .append_serializable_element(b"root", &root) - .map_err(|_| Error::TranscriptError)?; - - let n_rows = mat.n; - let n_cols = mat.m; - let n_ext_cols = ext_mat.m; - // 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata. let commitment = LinCodePCCommitment { metadata: Metadata { @@ -275,92 +288,67 @@ where commitment, None, )); + states.push(state); } - let com_len = &commitments.len(); - Ok((commitments, vec![(); *com_len])) + Ok((commitments, states)) } fn open<'a>( ck: &Self::CommitterKey, - labeled_polynomials: impl IntoIterator>, + _labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, - _rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut proof_array = LPCPArray::default(); - let labeled_commitments: Vec<&'a LabeledCommitment> = - commitments.into_iter().collect(); - let labeled_polynomials: Vec<&'a LabeledPolynomial> = - labeled_polynomials.into_iter().collect(); - - if labeled_commitments.len() != labeled_polynomials.len() { - return Err(Error::IncorrectInputLength(format!( - "Mismatched lengths: {} commitments, {} polynomials", - labeled_commitments.len(), - labeled_polynomials.len() - ))); - } - for i in 0..labeled_polynomials.len() { - let polynomial = labeled_polynomials[i].polynomial(); - let commitment = labeled_commitments[i].commitment(); + for (labeled_commitment, state) in commitments.into_iter().zip(states) { + let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; - let root = &commitment.root; // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. - let (mat, ext_mat) = L::compute_matrices(polynomial, ck); - // 2. Create the Merkle tree from the hashes of each column. 
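The block deleted here used to recompute the encoding and the Merkle tree inside `open`; the replacement pulls both out of the cached commitment state instead. For reference, the state that `commit` now produces and `open` consumes (field names from the diff; this is prover-side data, never sent to the verifier):

    // Cached by `commit`, reused by `open`.
    pub struct LinCodePCCommitmentState<F: PrimeField, H: CRHScheme> {
        pub(crate) mat: Matrix<F>,          // coefficient matrix M
        pub(crate) ext_mat: Matrix<F>,      // row-wise encoding of M
        pub(crate) leaves: Vec<H::Output>,  // column hashes = Merkle leaves
    }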
- let col_tree = create_merkle_tree::( - &ext_mat, - ck.leaf_hash_params(), - ck.two_to_one_params(), - ck.col_hash_params(), + let Self::CommitmentState { + mat, + ext_mat, + leaves: col_hashes, + } = state; + let mut col_hashes: Vec = + col_hashes.clone().into_iter().map(|h| h.into()).collect(); // TODO cfg_inter + + let col_tree = create_merkle_tree::( + &mut col_hashes, + ck.leaf_hash_param(), + ck.two_to_one_hash_param(), )?; // 3. Generate vector `b` to left-multiply the matrix. let (_, b) = L::tensor(point, n_cols, n_rows); - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", root) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); // If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and append it to the transcript. let well_formedness = if ck.check_well_formedness() { - let mut r = Vec::new(); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let r = sponge.squeeze_field_elements::(n_rows); let v = mat.row_mul(&r); - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); Some(v) } else { None }; let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } + sponge.absorb(&point_vec); proof_array.push(LinCodePCProof { // Compute the opening proof and append b.M to the transcript. @@ -371,7 +359,7 @@ where &mat, &ext_mat, &col_tree, - &mut transcript, + sponge, )?, well_formedness, }); @@ -386,31 +374,19 @@ where point: &'a P::Point, values: impl IntoIterator, proof_array: &Self::Proof, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, { - let labeled_commitments: Vec<&'a LabeledCommitment> = - commitments.into_iter().collect(); - let values: Vec = values.into_iter().collect(); - - if labeled_commitments.len() != proof_array.len() - || labeled_commitments.len() != values.len() - { - return Err(Error::IncorrectInputLength( - format!( - "Mismatched lengths: {} proofs were provided for {} commitments with {} claimed values",labeled_commitments.len(), proof_array.len(), values.len() - ) - )); - } - let leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters = - vk.leaf_hash_params(); - let two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = - vk.two_to_one_params(); + let leaf_hash_param: &<::LeafHash as CRHScheme>::Parameters = + vk.leaf_hash_param(); + let two_to_one_hash_param: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = + vk.two_to_one_hash_param(); - for (i, labeled_commitment) in labeled_commitments.iter().enumerate() { + for (i, (labeled_commitment, value)) in commitments.into_iter().zip(values).enumerate() { + let proof = &proof_array[i]; let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; @@ -418,31 +394,19 @@ where let root = &commitment.root; let t = calculate_t::(vk.sec_param(), vk.distance(), n_ext_cols)?; - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", &commitment.root) - .map_err(|_| Error::TranscriptError)?; + 
sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); let out = if vk.check_well_formedness() { - if proof_array[i].well_formedness.is_none() { + if proof.well_formedness.is_none() { return Err(Error::InvalidCommitment); } - let tmp = &proof_array[i].well_formedness.as_ref(); - let well_formedness = tmp.unwrap(); - let mut r = Vec::with_capacity(n_rows); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let tmp = &proof.well_formedness.as_ref(); + let v = tmp.unwrap(); + let r = sponge.squeeze_field_elements::(n_rows); // Upon sending `v` to the Verifier, add it to the sponge. The claim is that v = r.M. - transcript - .append_serializable_element(b"v", well_formedness) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); - (Some(well_formedness), Some(r)) + (Some(v), Some(r)) } else { (None, None) }; @@ -450,36 +414,35 @@ where // 1. Seed the transcript with the point and the recieved vector // TODO Consider removing the evaluation point from the transcript. let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } - transcript - .append_serializable_element(b"v", &proof_array[i].opening.v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&point_vec); + sponge.absorb(&proof.opening.v); // 2. Ask random oracle for the `t` indices where the checks happen. - let indices = get_indices_from_transcript::(n_ext_cols, t, &mut transcript)?; + let indices = get_indices_from_sponge(n_ext_cols, t, sponge)?; // 3. Hash the received columns into leaf hashes. - let col_hashes: Vec = proof_array[i] + let col_hashes: Vec = proof .opening .columns .iter() - .map(|c| hash_column::(c.clone(), vk.col_hash_params()).unwrap()) + .map(|c| { + H::evaluate(vk.col_hash_params(), c.clone()) + .map_err(|_| Error::HashingError) + .unwrap() + .into() + }) .collect(); // 4. Verify the paths for each of the leaf hashes - this is only run once, // even if we have a well-formedness check (i.e., we save sending and checking the columns). // See "Concrete optimizations to the commitment scheme", p.12 of [Brakedown](https://eprint.iacr.org/2021/1043.pdf). for (j, (leaf, q_j)) in col_hashes.iter().zip(indices.iter()).enumerate() { - let path = &proof_array[i].opening.paths[j]; + let path = &proof.opening.paths[j]; if path.leaf_index != *q_j { return Err(Error::InvalidCommitment); } - path.verify(leaf_hash_params, two_to_one_params, root, leaf.clone()) + path.verify(leaf_hash_param, two_to_one_hash_param, root, leaf.clone()) .map_err(|_| Error::InvalidCommitment)?; } @@ -493,7 +456,7 @@ where }; // 5. Compute the encoding w = E(v). - let w = L::encode(&proof_array[i].opening.v, vk); + let w = L::encode(&proof.opening.v, vk); // 6. Compute `a`, `b` to right- and left- multiply with the matrix `M`. 
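At this point the verifier holds `v` (claimed to equal b.M) and its encoding w = E(v), so the evaluation claim reduces to inner products: every sampled column index j must satisfy <b, col_j> = w_j, and the claimed value must equal <v, a>. A sketch of the per-column test (the crate's `check_inner_product` is assumed to reduce to this comparison):

    use ark_ff::Field;

    // True iff the j-th received column is consistent with w = E(v) under b.
    fn column_consistent<F: Field>(b: &[F], col_j: &[F], w_j: F) -> bool {
        b.iter().zip(col_j).map(|(x, y)| *x * y).sum::<F>() == w_j
    }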
let (a, b) = L::tensor(point, n_cols, n_rows); @@ -506,12 +469,12 @@ where for (transcript_index, matrix_index) in indices.iter().enumerate() { check_inner_product( &r, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w_well_formedness[*matrix_index], )?; check_inner_product( &b, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w[*matrix_index], )?; } @@ -519,13 +482,13 @@ where for (transcript_index, matrix_index) in indices.iter().enumerate() { check_inner_product( &b, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w[*matrix_index], )?; } } - if inner_product(&proof_array[i].opening.v, &a) != values[i] { + if inner_product(&proof.opening.v, &a) != value { eprintln!("Function check: claimed value in position {i} does not match the evaluation of the committed polynomial in the same position"); return Ok(false); } @@ -536,58 +499,45 @@ where } // TODO maybe this can go to utils -fn create_merkle_tree( - ext_mat: &Matrix, - leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters, - two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, - col_hash_params: &H::Parameters, +fn create_merkle_tree( + leaves: &mut Vec, + leaf_hash_param: &<::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, ) -> Result, Error> where - F: PrimeField, C: Config, - H: CRHScheme, - Vec: Borrow<::Input>, - H::Output: Into, - C::Leaf: Default + Clone + Send, + C::Leaf: Default + Clone + Send + AsRef, { - let ext_mat_cols = ext_mat.cols(); - - let mut col_hashes: Vec = cfg_into_iter!(ext_mat_cols) - .map(|col| hash_column::(col, &col_hash_params).unwrap()) - .collect(); - // pad the column hashes with zeroes - let next_pow_of_two = col_hashes.len().next_power_of_two(); - col_hashes.resize(next_pow_of_two, ::default()); + let next_pow_of_two = leaves.len().next_power_of_two(); + leaves.resize(next_pow_of_two, ::default()); - MerkleTree::::new(leaf_hash_params, two_to_one_params, col_hashes) + MerkleTree::::new(leaf_hash_param, two_to_one_hash_param, leaves) .map_err(|_| Error::HashingError) } -fn generate_proof( +fn generate_proof( sec_param: usize, distance: (usize, usize), b: &[F], mat: &Matrix, ext_mat: &Matrix, col_tree: &MerkleTree, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> where - F: PrimeField, + F: PrimeField + Absorb, C: Config, + S: CryptographicSponge, { let t = calculate_t::(sec_param, distance, ext_mat.m)?; // 1. left-multiply the matrix by `b`. let v = mat.row_mul(b); - - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); // 2. Generate t column indices to test the linear combination on. - let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?; + let indices = get_indices_from_sponge(ext_mat.m, t, sponge)?; // 3. Compute Merkle tree paths for the requested columns. 
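    // For each sampled index q_j, the prover ships the column ext_mat[:, q_j]
    // together with its Merkle authentication path; one set of paths then
    // serves both the well-formedness and the evaluation checks (see `check`
    // above). Sketch, assuming `MerkleTree::generate_proof` from
    // ark-crypto-primitives and `Matrix::cols` as used earlier in this file:
    //
    //     for q_j in &indices {
    //         queried_columns.push(ext_mat.cols()[*q_j].clone());
    //         paths.push(col_tree.generate_proof(*q_j)?);
    //     }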
let mut queried_columns = Vec::with_capacity(t); diff --git a/poly-commit/src/linear_codes/multilinear_ligero/mod.rs b/poly-commit/src/linear_codes/multilinear_ligero/mod.rs index ed0c4ab1..7f071cc9 100644 --- a/poly-commit/src/linear_codes/multilinear_ligero/mod.rs +++ b/poly-commit/src/linear_codes/multilinear_ligero/mod.rs @@ -47,16 +47,16 @@ where _max_degree: usize, _num_vars: Option, _rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams { Self::LinCodePCParams::new( 128, 2, true, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ) } diff --git a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs index 2f91c402..016dc39f 100644 --- a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs +++ b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs @@ -4,7 +4,6 @@ mod tests { use crate::linear_codes::LinearCodePCS; use crate::utils::test_sponge; use crate::{ - challenge::ChallengeGenerator, linear_codes::{LigeroPCParams, MultilinearLigero, PolynomialCommitment}, LabeledPolynomial, }; @@ -89,8 +88,8 @@ mod tests { let mut rng = &mut test_rng(); let num_vars = 10; // just to make sure we have the right degree given the FFT domain for our field - let leaf_hash_params = ::setup(&mut rng).unwrap(); - let two_to_one_params = ::setup(&mut rng) + let leaf_hash_param = ::setup(&mut rng).unwrap(); + let two_to_one_hash_param = ::setup(&mut rng) .unwrap() .clone(); let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); @@ -100,8 +99,8 @@ mod tests { 128, 4, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); @@ -122,29 +121,20 @@ mod tests { let value = labeled_poly.evaluate(&point); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); - let proof = LigeroPCS::::open( &ck, &[labeled_poly], &c, &point, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), &rands, None, ) .unwrap(); - assert!(LigeroPCS::::check( - &vk, - &c, - &point, - [value], - &proof, - &mut challenge_generator, - None - ) - .unwrap()); + assert!( + LigeroPCS::::check(&vk, &c, &point, [value], &proof, &mut test_sponge, None) + .unwrap() + ); } fn rand_point(num_vars: Option, rng: &mut ChaCha20Rng) -> Vec { diff --git a/poly-commit/src/linear_codes/univariate_ligero/mod.rs b/poly-commit/src/linear_codes/univariate_ligero/mod.rs index 973a5c30..e6b59fcc 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/mod.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/mod.rs @@ -41,16 +41,16 @@ where _max_degree: usize, _num_vars: Option, _rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams { Self::LinCodePCParams::new( 128, 4, true, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ) } diff --git 
a/poly-commit/src/linear_codes/univariate_ligero/tests.rs b/poly-commit/src/linear_codes/univariate_ligero/tests.rs index c98c09ec..3151b2d5 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/tests.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/tests.rs @@ -5,7 +5,6 @@ mod tests { use crate::linear_codes::LinearCodePCS; use crate::utils::test_sponge; use crate::{ - challenge::ChallengeGenerator, linear_codes::{LigeroPCParams, PolynomialCommitment, UnivariateLigero}, LabeledPolynomial, }; @@ -83,8 +82,8 @@ mod tests { let degree = 4; let mut rng = &mut test_rng(); // just to make sure we have the right degree given the FFT domain for our field - let leaf_hash_params = ::setup(&mut rng).unwrap(); - let two_to_one_params = ::setup(&mut rng) + let leaf_hash_param = ::setup(&mut rng).unwrap(); + let two_to_one_hash_param = ::setup(&mut rng) .unwrap() .clone(); let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); @@ -94,8 +93,8 @@ mod tests { 128, 4, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); @@ -116,29 +115,19 @@ mod tests { let value = labeled_poly.evaluate(&point); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); - let proof = LigeroPCS::open( &ck, &[labeled_poly], &c, &point, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), &rands, None, ) .unwrap(); - assert!(LigeroPCS::check( - &vk, - &c, - &point, - [value], - &proof, - &mut challenge_generator, - None - ) - .unwrap()); + assert!( + LigeroPCS::check(&vk, &c, &point, [value], &proof, &mut test_sponge, None).unwrap() + ); } fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> F { diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 9e43221f..12b868ad 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -1,11 +1,7 @@ -use core::borrow::Borrow; - -use crate::utils::IOPTranscript; use crate::{utils::ceil_div, Error}; -use ark_crypto_primitives::{crh::CRHScheme, merkle_tree::Config}; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{FftField, PrimeField}; - use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_std::string::ToString; use ark_std::vec::Vec; @@ -38,35 +34,18 @@ pub(crate) fn get_num_bytes(n: usize) -> usize { ceil_div((usize::BITS - n.leading_zeros()) as usize, 8) } -#[inline] -pub(crate) fn hash_column(array: Vec, params: &H::Parameters) -> Result -where - F: PrimeField, - C: Config, - H: CRHScheme, - Vec: Borrow<::Input>, - C::Leaf: Sized, - H::Output: Into, -{ - H::evaluate(params, array) - .map_err(|_| Error::HashingError) - .map(|x| x.into()) -} - /// Generate `t` (not necessarily distinct) random points in `[0, n)` /// using the current state of the `transcript`. 
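The function renamed below is the Fiat-Shamir index sampler. A condensed sketch of what one iteration computes after the migration: squeeze just enough bytes to cover `n` (per `get_num_bytes` above), absorb them back into the sponge, fold them big-endian into a `usize`, and reduce modulo `n` so the index lands in `[0, n)` as the doc comment requires:

    use ark_crypto_primitives::sponge::CryptographicSponge;

    // One sampled index in [0, n); calling this t times yields the index set.
    fn sample_index<S: CryptographicSponge>(sponge: &mut S, n: usize) -> usize {
        let bytes_to_squeeze = ((usize::BITS - n.leading_zeros()) as usize + 7) / 8;
        let bytes = sponge.squeeze_bytes(bytes_to_squeeze);
        sponge.absorb(&bytes);
        let ind = bytes.iter().fold(0usize, |acc, &x| (acc << 8) + x as usize);
        ind % n
    }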
-pub(crate) fn get_indices_from_transcript( +pub(crate) fn get_indices_from_sponge( n: usize, t: usize, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> { let bytes_to_squeeze = get_num_bytes(n); let mut indices = Vec::with_capacity(t); for _ in 0..t { - let mut bytes: Vec = vec![0; bytes_to_squeeze]; - transcript - .get_and_append_byte_challenge(b"i", &mut bytes) - .map_err(|_| Error::TranscriptError)?; + let bytes = sponge.squeeze_bytes(bytes_to_squeeze); + sponge.absorb(&bytes); // get the usize from Vec: let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); diff --git a/poly-commit/src/marlin/marlin_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pc/data_structures.rs index 2b09e03a..203e3201 100644 --- a/poly-commit/src/marlin/marlin_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{ - DenseUVPolynomial, PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, - PCRandomness, PCVerifierKey, Vec, + DenseUVPolynomial, PCCommitment, PCCommitmentState, PCCommitterKey, PCPreparedCommitment, + PCPreparedVerifierKey, PCVerifierKey, Vec, }; use ark_ec::pairing::Pairing; use ark_ec::AdditiveGroup; @@ -360,7 +360,8 @@ impl<'a, F: PrimeField, P: DenseUVPolynomial> AddAssign<(F, &'a Randomness> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: kzg10::Randomness::empty(), diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 39c4e362..7fbfba07 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, marlin::Marlin, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, ToString, Vec}; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Div, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -66,7 +65,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -180,7 +179,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -191,7 +190,7 @@ where let commit_time = start_timer!(|| "Committing to polynomials"); let mut commitments = Vec::new(); - let mut randomness = Vec::new(); + let mut states = Vec::new(); for p in polynomials { let label = p.label(); @@ -232,17 +231,17 @@ where }; let comm = Commitment { comm, shifted_comm }; - let rand = Randomness { rand, shifted_rand }; + let state = Randomness { rand, shifted_rand }; commitments.push(LabeledCommitment::new( label.to_string(), comm, degree_bound, )); - randomness.push(rand); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((commitments, randomness)) + Ok((commitments, states)) } /// On input a polynomial `p` and a point `point`, outputs a proof for the same. 
@@ -251,13 +250,13 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut p = P::zero(); @@ -267,7 +266,7 @@ where let mut shifted_r_witness = P::zero(); let mut enforce_degree_bound = false; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, rand) in labeled_polynomials.into_iter().zip(states) { let degree_bound = polynomial.degree_bound(); assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -283,7 +282,7 @@ where )?; // compute next challenges challenge^j and challenge^{j+1}. - let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -299,7 +298,7 @@ where *point, &shifted_rand, )?; - let challenge_j_1 = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j_1 = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_witness = shift_polynomial(ck, &witness, degree_bound); @@ -347,7 +346,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -358,7 +357,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, Some(vk), )?; let combined_comm = kzg10::Commitment(combined_comm.into()); @@ -373,7 +372,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -384,7 +383,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, Some(vk), )?; assert_eq!(proof.len(), combined_queries.len()); @@ -407,13 +406,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -422,8 +421,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -437,7 +436,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -450,7 +449,7 @@ where query_set, evaluations, proof, - opening_challenges, + sponge, rng, ) } @@ -462,19 +461,19 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result>, Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let rng = &mut crate::optional_rng::OptionalRng(rng); let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) .map(|((poly, r), 
comm)| (poly.label(), (poly, r, comm))) .collect(); @@ -497,7 +496,7 @@ where let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); for label in labels { @@ -507,7 +506,7 @@ where })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(rand); query_comms.push(comm); } @@ -517,8 +516,8 @@ where query_polys, query_comms, point, - opening_challenges, - query_rands, + sponge, + query_states, Some(rng), )?; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs index 8ccf300b..9cc8d73b 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{BTreeMap, Vec}; use crate::{ - PCCommitterKey, PCPreparedVerifierKey, PCRandomness, PCUniversalParams, PCVerifierKey, + PCCommitmentState, PCCommitterKey, PCPreparedVerifierKey, PCUniversalParams, PCVerifierKey, }; use ark_ec::pairing::Pairing; use ark_poly::DenseMVPolynomial; @@ -362,12 +362,13 @@ where } } -impl PCRandomness for Randomness +impl PCCommitmentState for Randomness where E: Pairing, P: DenseMVPolynomial, P::Point: Index, { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index ac47c2a7..eee026d7 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -5,10 +5,14 @@ use crate::{ }; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use crate::{ToString, Vec}; use ark_ec::AffineRepr; -use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM}; +use ark_ec::{ + pairing::Pairing, + scalar_mul::{BatchMulPreprocessing, ScalarMul}, + CurveGroup, VariableBaseMSM, +}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; @@ -20,7 +24,6 @@ pub use data_structures::*; mod combinations; use combinations::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -151,7 +154,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = marlin_pc::Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -211,47 +214,33 @@ where }) .unzip(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let mut powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); - powers_of_g.push(g); + let mut powers_of_g = g.batch_mul(&powers_of_beta); + powers_of_g.push(g.into_affine()); 
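(The `FixedBase` window-table ritual deleted above collapses into `ScalarMul::batch_mul`, which batches the fixed-base multiplications and hands back affine points directly. A rough standalone sketch — the helper name and bounds are illustrative, assuming the same `ark_ec` API this hunk itself calls:)

use ark_ec::scalar_mul::ScalarMul;
use ark_ff::One;

// Compute [g, beta * g, beta^2 * g, ...] in affine form with one batched call.
fn fixed_base_powers<G: ScalarMul>(g: G, beta: G::ScalarField, n: usize) -> Vec<G::MulBase> {
    let mut scalars = Vec::with_capacity(n);
    let mut cur = G::ScalarField::one();
    for _ in 0..n {
        scalars.push(cur);
        cur *= &beta;
    }
    // When one base is reused across several calls (as with gamma_g below),
    // BatchMulPreprocessing::new(g, n) builds the table once instead.
    g.batch_mul(&scalars)
}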
powers_of_beta_terms.push(P::Term::new(vec![])); end_timer!(g_time); let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 2); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); // Each element `i` of `powers_of_gamma_g` is a vector of length `max_degree+1` // containing `betas[i]^j \gamma G` for `j` from 1 to `max_degree+1` to support // up to `max_degree` queries let mut powers_of_gamma_g = vec![Vec::new(); num_vars]; + let gamma_g_table = BatchMulPreprocessing::new(gamma_g, max_degree + 1); + ark_std::cfg_iter_mut!(powers_of_gamma_g) .enumerate() .for_each(|(i, v)| { - let mut powers_of_beta = Vec::with_capacity(max_degree); + let mut powers_of_beta = Vec::with_capacity(max_degree + 1); let mut cur = E::ScalarField::one(); for _ in 0..=max_degree { cur *= &betas[i]; powers_of_beta.push(cur); } - *v = FixedBase::msm::( - scalar_bits, - window_size, - &gamma_g_table, - &powers_of_beta, - ); + *v = gamma_g_table.batch_mul(&powers_of_beta); }); end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); let gamma_g = gamma_g.into_affine(); - let powers_of_gamma_g = powers_of_gamma_g - .into_iter() - .map(|v| E::G1::normalize_batch(&v)) - .collect(); let beta_h: Vec<_> = betas.iter().map(|b| h.mul(b).into_affine()).collect(); let h = h.into_affine(); let prepared_h = h.into(); @@ -343,7 +332,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -440,26 +429,26 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // Compute random linear combinations of committed polynomials and randomness let mut p = P::zero(); let mut r = Randomness::empty(); - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { Self::check_degrees_and_bounds(ck.supported_degree, &polynomial)?; // compute challenge^j and challenge^{j+1}. 
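(Each such challenge is now squeezed directly from the sponge, as the next hunk shows. In isolation — assuming `ark-crypto-primitives`' `FieldElementSize` and the crate-level `CHALLENGE_SIZE` constant of that type:)

use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
use ark_ff::PrimeField;

// One Fiat-Shamir opening challenge: a single field element of bounded bit
// length, replacing ChallengeGenerator::try_next_challenge_of_size.
fn next_opening_challenge<F: PrimeField, S: CryptographicSponge>(
    sponge: &mut S,
    size: FieldElementSize,
) -> F {
    sponge.squeeze_field_elements_with_sizes::<F>(&[size])[0]
}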
- let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; p += (challenge_j, polynomial.polynomial()); - r += (challenge_j, rand); + r += (challenge_j, state); } let open_time = start_timer!(|| format!("Opening polynomial of degree {}", p.degree())); @@ -538,7 +527,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -550,7 +539,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, None, )?; // Compute both sides of the pairing equation @@ -582,7 +571,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -593,7 +582,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, None, )?; let check_time = @@ -660,13 +649,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -675,8 +664,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -690,7 +679,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -703,7 +692,7 @@ where eqn_query_set, eqn_evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index 4bd4fe27..d7e7f5a1 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ -1,9 +1,9 @@ -use crate::{challenge::ChallengeGenerator, CHALLENGE_SIZE}; +use crate::CHALLENGE_SIZE; use crate::{kzg10, Error}; use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; use crate::{Evaluations, LabeledCommitment, QuerySet}; -use crate::{PCRandomness, Polynomial, PolynomialCommitment}; +use crate::{PCCommitmentState, Polynomial, PolynomialCommitment}; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; @@ -110,7 +110,7 @@ where fn accumulate_commitments_and_values<'a>( commitments: impl IntoIterator>>, values: impl IntoIterator, - challenge_gen: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(E::G1, E::ScalarField), Error> { let acc_time = start_timer!(|| "Accumulating commitments and values"); @@ -121,13 +121,14 @@ where let commitment = labeled_commitment.commitment(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); - let challenge_i = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; combined_comm += &commitment.comm.0.mul(challenge_i); combined_value += &(value * &challenge_i); if let Some(degree_bound) = degree_bound { - let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i_1: E::ScalarField 
= + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group(); @@ -152,7 +153,7 @@ where commitments: impl IntoIterator>>, query_set: &QuerySet, evaluations: &Evaluations, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(Vec>, Vec, Vec), Error> where @@ -199,7 +200,7 @@ where let (c, v) = Self::accumulate_commitments_and_values( comms_to_combine, values_to_combine, - opening_challenges, + sponge, vk, )?; end_timer!(lc_time); @@ -227,8 +228,8 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Error> where @@ -241,18 +242,18 @@ where Commitment = marlin_pc::Commitment, Error = Error, >, - PC::Randomness: 'a + AddAssign<(E::ScalarField, &'a PC::Randomness)>, + PC::CommitmentState: 'a + AddAssign<(E::ScalarField, &'a PC::CommitmentState)>, PC::Commitment: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) .map(|((p, r), c)| (p.label(), (p, r, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states: Vec = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -262,13 +263,13 @@ where let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = PC::Randomness::empty(); + let mut randomness = PC::CommitmentState::empty(); let mut coeffs_and_comms = Vec::new(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, cur_comm) = + let &(cur_poly, cur_state, cur_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -284,14 +285,14 @@ where // Some(_) > None, always. 
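(The comment relies on `Option`'s derived ordering, under which `None` sorts below every `Some`, so the running `max` retains a bound whenever any constituent polynomial carries one. Concretely:)

assert!(Some(0usize) > None);                            // any Some beats None
assert_eq!(core::cmp::max(None, Some(5usize)), Some(5)); // max keeps the bound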
hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + randomness += (*coeff, cur_state); coeffs_and_comms.push((*coeff, cur_comm.commitment())); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(randomness); lc_commitments.push(Self::combine_commitments(coeffs_and_comms)); lc_info.push((lc_label, degree_bound)); } @@ -308,8 +309,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; @@ -323,7 +324,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -404,7 +405,7 @@ where &query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index eff86ab9..0973e822 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -1,9 +1,10 @@ use crate::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; +use ark_ec::scalar_mul::BatchMulPreprocessing; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{Field, PrimeField}; use ark_ff::{One, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -27,14 +28,11 @@ impl MultilinearPC { /// setup pub fn setup(num_vars: usize, rng: &mut R) -> UniversalParams { assert!(num_vars > 0, "constant polynomial not supported"); - let g: E::G1 = E::G1::rand(rng); - let h: E::G2 = E::G2::rand(rng); - let g = g.into_affine(); - let h = h.into_affine(); + let g = E::G1::rand(rng); + let h = E::G2::rand(rng); let mut powers_of_g = Vec::new(); let mut powers_of_h = Vec::new(); let t: Vec<_> = (0..num_vars).map(|_| E::ScalarField::rand(rng)).collect(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let mut eq: LinkedList> = LinkedList::from_iter(eq_extension(&t).into_iter()); @@ -54,29 +52,15 @@ impl MultilinearPC { } let mut pp_powers = Vec::new(); - let mut total_scalars = 0; for i in 0..num_vars { let eq = eq_arr.pop_front().unwrap(); let pp_k_powers = (0..(1 << (num_vars - i))).map(|x| eq[x]); pp_powers.extend(pp_k_powers); - total_scalars += 1 << (num_vars - i); } - let window_size = FixedBase::get_mul_window_size(total_scalars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_group()); - - let pp_g = E::G1::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &g_table, - &pp_powers, - )); - let pp_h = E::G2::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &h_table, - &pp_powers, - )); + + let g_table = BatchMulPreprocessing::new(g, num_vars); + let pp_g = g_table.batch_mul(&pp_powers); + let pp_h = h.batch_mul(&pp_powers); let mut start = 0; for i in 0..num_vars { let size = 1 << (num_vars - i); @@ -89,18 +73,14 @@ impl MultilinearPC { // uncomment to measure the time for calculating vp // let vp_generation_timer = start_timer!(|| "VP generation"); - let g_mask = { - let window_size = 
FixedBase::get_mul_window_size(num_vars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - E::G1::normalize_batch(&FixedBase::msm(scalar_bits, window_size, &g_table, &t)) - }; + let g_mask = g_table.batch_mul(&t); // end_timer!(vp_generation_timer); UniversalParams { num_vars, - g, + g: g.into_affine(), g_mask, - h, + h: h.into_affine(), powers_of_g, powers_of_h, } @@ -199,11 +179,7 @@ impl MultilinearPC { ) -> bool { let left = E::pairing(commitment.g_product.into_group() - &vk.g.mul(value), vk.h); - let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize; - let window_size = FixedBase::get_mul_window_size(vk.nv); - - let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_group()); - let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); + let g_mul = vk.g.into_group().batch_mul(point); let pairing_lefts: Vec<_> = (0..vk.nv) .map(|i| vk.g_mask_random[i].into_group() - &g_mul[i]) diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs index b989b323..caf9b79c 100644 --- a/poly-commit/src/sonic_pc/mod.rs +++ b/poly-commit/src/sonic_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, String, ToString, Vec}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -47,12 +46,12 @@ where point: P::Point, values: impl IntoIterator, proof: &kzg10::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, randomizer: Option, ) { let acc_time = start_timer!(|| "Accumulating elements"); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; // Keeps track of running combination of values let mut combined_values = E::ScalarField::zero(); @@ -73,7 +72,7 @@ where // Accumulate values in the BTreeMap *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge; - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } // Push expected results into list of elems. 
Power will be the negative of the expected power @@ -146,7 +145,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -281,7 +280,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -291,7 +290,7 @@ where let rng = &mut crate::optional_rng::OptionalRng(rng); let commit_time = start_timer!(|| "Committing to polynomials"); let mut labeled_comms: Vec> = Vec::new(); - let mut randomness: Vec = Vec::new(); + let mut randomness: Vec = Vec::new(); for labeled_polynomial in polynomials { let enforced_degree_bounds: Option<&[usize]> = ck @@ -345,21 +344,21 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let mut combined_polynomial = P::zero(); let mut combined_rand = kzg10::Randomness::empty(); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { let enforced_degree_bounds: Option<&[usize]> = ck .enforced_degree_bounds .as_ref() @@ -373,8 +372,8 @@ where )?; combined_polynomial += (curr_challenge, polynomial.polynomial()); - combined_rand += (curr_challenge, rand); - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + combined_rand += (curr_challenge, state); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let proof_time = start_timer!(|| "Creating proof for polynomials"); @@ -390,7 +389,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -410,7 +409,7 @@ where *point, values, proof, - opening_challenges, + sponge, None, ); @@ -430,7 +429,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -481,7 +480,7 @@ where *point, values_to_combine.into_iter(), p, - opening_challenges, + sponge, Some(randomizer), ); @@ -502,24 +501,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -528,13 +527,13 @@ where let mut poly = P::zero(); let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = 
Self::Randomness::empty(); + let mut state = Self::CommitmentState::empty(); let mut comm = E::G1::zero(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, curr_comm) = + let &(cur_poly, cur_state, curr_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -553,14 +552,14 @@ where // Some(_) > None, always. hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + state += (*coeff, cur_state); comm += &curr_comm.commitment().0.mul(*coeff); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(state); lc_commitments.push(comm); lc_info.push((lc_label, degree_bound)); } @@ -581,8 +580,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) } @@ -597,7 +596,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -666,7 +665,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/streaming_kzg/data_structures.rs b/poly-commit/src/streaming_kzg/data_structures.rs index 7adaf005..c8b19c83 100644 --- a/poly-commit/src/streaming_kzg/data_structures.rs +++ b/poly-commit/src/streaming_kzg/data_structures.rs @@ -141,7 +141,7 @@ where /// Stream implementation of folded polynomial. #[derive(Clone, Copy)] -pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>, usize); +pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>); /// Iterator implementation of folded polynomial. pub struct FoldedPolynomialStreamIter<'a, F, I> { challenges: &'a [F], @@ -158,8 +158,7 @@ where /// Initialize a new folded polynomial stream. pub fn new(coefficients: &'a S, challenges: &'a [F]) -> Self { let tree = FoldedPolynomialTree::new(coefficients, challenges); - let len = challenges.len(); - Self(tree, len) + Self(tree) } } @@ -241,7 +240,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(), two]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 2); + let fold_stream = FoldedPolynomialStream(foldstream); assert_eq!(fold_stream.len(), 1); assert_eq!( fold_stream.iter().next(), @@ -253,7 +252,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(); 4]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 4).iter(); + let fold_stream = FoldedPolynomialStream(foldstream).iter(); assert_eq!(fold_stream.last(), Some(coefficients.iter().sum())); } diff --git a/poly-commit/src/streaming_kzg/time.rs b/poly-commit/src/streaming_kzg/time.rs index 8c7fa2f8..b8d52093 100644 --- a/poly-commit/src/streaming_kzg/time.rs +++ b/poly-commit/src/streaming_kzg/time.rs @@ -1,9 +1,9 @@ //! An implementation of a time-efficient version of Kate et al's polynomial commitment, //!
with optimization from [\[BDFG20\]](https://eprint.iacr.org/2020/081.pdf). use ark_ec::pairing::Pairing; -use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::scalar_mul::ScalarMul; use ark_ec::CurveGroup; -use ark_ff::{PrimeField, Zero}; +use ark_ff::Zero; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, vec::Vec, UniformRand}; @@ -50,11 +50,7 @@ impl CommitterKey { let powers_of_tau = powers(tau, max_degree + 1); let g = E::G1::rand(rng); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g_proj = FixedBase::msm(scalar_bits, window_size, &g_table, &powers_of_tau); - let powers_of_g = E::G1::normalize_batch(&powers_of_g_proj); + let powers_of_g = g.batch_mul(&powers_of_tau); let g2 = E::G2::rand(rng).into_affine(); let powers_of_g2 = powers_of_tau diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index 5606c6b0..9f731a0b 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -1,4 +1,6 @@ -use core::marker::PhantomData; +use ark_ff::Field; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::vec::Vec; #[cfg(feature = "parallel")] use rayon::{ @@ -6,13 +8,6 @@ use rayon::{ prelude::IndexedParallelIterator, }; -use ark_ff::{Field, PrimeField}; -use ark_serialize::CanonicalSerialize; -use ark_std::vec::Vec; -use merlin::Transcript; - -use crate::Error; - /// Takes as input a struct, and converts them to a series of bytes. All traits /// that implement `CanonicalSerialize` can be automatically converted to bytes /// in this manner. @@ -31,7 +26,8 @@ pub(crate) fn ceil_div(x: usize, y: usize) -> usize { (x + y - 1) / y } -#[derive(Debug)] +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub struct Matrix { pub(crate) n: usize, pub(crate) m: usize, @@ -140,86 +136,6 @@ pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { .sum() } -/// The following struct is taken from jellyfish repository. Once they change -/// their dependency on `crypto-primitive`, we use their crate instead of -/// a copy-paste. We needed the newer `crypto-primitive` for serializing. -#[derive(Clone)] -pub(crate) struct IOPTranscript { - transcript: Transcript, - is_empty: bool, - #[doc(hidden)] - phantom: PhantomData, -} - -// TODO: merge this with jf_plonk::transcript -impl IOPTranscript { - /// Create a new IOP transcript. - pub(crate) fn new(label: &'static [u8]) -> Self { - Self { - transcript: Transcript::new(label), - is_empty: true, - phantom: PhantomData, - } - } - - /// Append the message to the transcript. - pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { - self.transcript.append_message(label, msg); - self.is_empty = false; - Ok(()) - } - - /// Append the message to the transcript. - pub(crate) fn append_serializable_element( - &mut self, - label: &'static [u8], - group_elem: &S, - ) -> Result<(), Error> { - self.append_message( - label, - &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, - ) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// The output field element is statistical uniform as long - /// as the field has a size less than 2^384. 
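(The 2^384 bound is a statistical-distance argument: the 64 squeezed bytes form a uniform 512-bit integer, and reducing it modulo a group order p < 2^384 leaves each residue's probability within 2^-512 of uniform, for a total distance below 2^-128. A minimal sketch of the reduction itself, assuming only `ark_ff::PrimeField`:)

use ark_ff::PrimeField;

// 64 uniform bytes reduced modulo the field order: statistically within
// 2^-128 of uniform whenever the modulus has at most 384 bits.
fn uniform_field_element<F: PrimeField>(buf: &[u8; 64]) -> F {
    F::from_le_bytes_mod_order(buf)
}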
- pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - let mut buf = [0u8; 64]; - self.transcript.challenge_bytes(label, &mut buf); - let challenge = F::from_le_bytes_mod_order(&buf); - self.append_serializable_element(label, &challenge)?; - Ok(challenge) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// Without exposing the internal field `transcript`, - /// this is a wrapper around getting bytes as opposed to field elements. - pub(crate) fn get_and_append_byte_challenge( - &mut self, - label: &'static [u8], - dest: &mut [u8], - ) -> Result<(), Error> { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - self.transcript.challenge_bytes(label, dest); - self.append_message(label, dest)?; - Ok(()) - } -} - #[inline] #[cfg(test)] pub(crate) fn to_field(v: Vec) -> Vec { @@ -229,6 +145,8 @@ pub(crate) fn to_field(v: Vec) -> Vec { // TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. #[cfg(test)] use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +#[cfg(test)] +use ark_ff::PrimeField; #[cfg(test)] pub(crate) fn test_sponge() -> PoseidonSponge { From 1f988ac941243fda75a1870a8a5ce6fd6693cef0 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Thu, 18 Jan 2024 15:54:38 +0100 Subject: [PATCH 47/75] Add a few comments and update `Cargo.toml` --- Cargo.toml | 6 +++--- bench-templates/src/lib.rs | 9 ++++++--- poly-commit/Cargo.toml | 1 - 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 617fd8a1..bde57bf6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,6 +36,6 @@ ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } -ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves/" } -ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves/" } -ark-bn254 = { git = "https://github.com/arkworks-rs/curves/" } +ark-bls12-377 = { git = "https://github.com/arkworks-rs/algebra/" } +ark-bls12-381 = { git = "https://github.com/arkworks-rs/algebra/" } +ark-bn254 = { git = "https://github.com/arkworks-rs/algebra/" } diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 8a656589..952a36e4 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -16,14 +16,17 @@ use ark_poly_commit::{LabeledPolynomial, PolynomialCommitment}; pub use criterion::*; pub use paste::paste; -/// Measure the time cost of {commit/open/verify} across a range of num_vars +/// Measure the time cost of `method` (i.e., commit/open/verify) of a +/// multilinear PCS for all `num_vars` specified in `nv_list`. +/// `rand_poly` is a function that outputs a random multilinear polynomial. +/// `rand_point` is a function that outputs a random point in the domain of polynomial. 
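(A hypothetical call site for these parameters — the field `Fr`, the polynomial type, the PCS `SomePCS`, and the three helper functions are stand-ins rather than names this patch defines:)

// Inside a criterion bench entry point, with `criterion: Criterion` in scope:
bench_pcs_method::<Fr, DenseMultilinearExtension<Fr>, SomePCS>(
    &mut criterion,
    (12..=20).step_by(2).collect::<Vec<_>>(), // nv_list: 12, 14, 16, 18, 20 variables
    "commit",
    commit_timer,  // `method`: a timing helper matching the Fn bound below
    rand_ml_poly,  // `rand_poly`
    rand_ml_point, // `rand_point`
);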
pub fn bench_pcs_method< F: PrimeField, P: Polynomial, PCS: PolynomialCommitment>, >( c: &mut Criterion, - range: Vec, + nv_list: Vec, msg: &str, method: impl Fn( &PCS::CommitterKey, @@ -38,7 +41,7 @@ pub fn bench_pcs_method< let mut group = c.benchmark_group(msg); let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - for num_vars in range { + for num_vars in nv_list { let pp = PCS::setup(num_vars, Some(num_vars), rng).unwrap(); let (ck, vk) = PCS::trim(&pp, num_vars, num_vars, None).unwrap(); diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 57c68b85..c7a28579 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -16,7 +16,6 @@ ark-poly = {version = "^0.4.0", default-features = false } ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge", "merkle_tree"] } ark-std = { version = "^0.4.0", default-features = false } blake2 = { version = "0.10", default-features = false } -merlin = { version = "3.0.0", default-features = false } rand = { version = "0.8.0", optional = true } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } From 5677c5b43eab8e36dc47c14105132468decca982 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Mon, 22 Jan 2024 22:33:15 +0100 Subject: [PATCH 48/75] Remove extra `cfg_iter!` Co-authored-by: Pratyush Mishra --- poly-commit/src/hyrax/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index d5536137..51b7df64 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -376,7 +376,7 @@ where // t_prime coincides with the Pedersen commitment to lt with the // randomness r_lt computed here let r_lt = cfg_iter!(l) - .zip(cfg_iter!(state.randomness)) + .zip(&state.randomness) .map(|(l, r)| *l * r) .sum::(); From c2e64125c00d72154f8aca1e4ee75680d0e5569c Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Tue, 23 Jan 2024 00:38:18 +0100 Subject: [PATCH 49/75] Change `pedersen_commit` and add `cfg_into_iter!` --- poly-commit/src/hyrax/mod.rs | 80 +++++++++++++--------------------- poly-commit/src/hyrax/utils.rs | 6 +-- poly-commit/src/utils.rs | 18 ++------ 3 files changed, 37 insertions(+), 67 deletions(-) diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 51b7df64..2bf5923d 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -1,5 +1,4 @@ use crate::hyrax::utils::tensor_prime; -use crate::to_bytes; use crate::utils::{inner_product, scalar_by_vector, vector_sum, Matrix}; use crate::{ hyrax::utils::flat_to_matrix_column_major, Error, LabeledCommitment, LabeledPolynomial, @@ -9,6 +8,7 @@ use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::PrimeField; use ark_poly::MultilinearExtension; +use ark_serialize::serialize_to_vec; use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand}; use blake2::Blake2s256; use digest::Digest; @@ -80,42 +80,19 @@ where { /// Pedersen commitment to a vector of scalars as described in appendix A.1 /// of the reference article. - /// The caller must either directly pass hiding exponent `r` inside Some, - /// or provide an rng so that `r` can be sampled.
- /// If there are `n` scalars, the first `n` elements of the key will be - /// multiplied by them in the same order, and its `n + 1`th element will be - /// multiplied by `r`. + /// The function does not add the hiding term `h * r`. + /// It is only a wrapper around MSM. /// /// # Panics /// - /// Panics if both `r` and `rng` are None. - fn pedersen_commit( - key: &HyraxCommitterKey, - scalars: &[G::ScalarField], - r: Option, - rng: Option<&mut dyn RngCore>, - ) -> (G, G::ScalarField) { - // Cannot use unwrap_or, since its argument is always evaluated - let r = match r { - Some(v) => v, - None => G::ScalarField::rand(rng.expect("Either r or rng must be provided")), - }; - - let mut scalars_ext = Vec::from(scalars); - scalars_ext.push(r); - - // Trimming the key to the length of the coefficient vector - let mut points_ext = key.com_key[0..scalars.len()].to_vec(); - points_ext.push(key.h); - + /// Panics if `key` and `scalars` do not have the same length. + fn pedersen_commit(key: &[G], scalars: &[G::ScalarField]) -> G::Group { + assert_eq!(key.len(), scalars.len()); let scalars_bigint = ark_std::cfg_iter!(scalars) .map(|s| s.into_bigint()) .collect::>(); - // Multi-exponentiation in the group of points of the EC - let com = ::msm_bigint(&points_ext, &scalars_bigint); - - (com.into(), r) + ::msm_bigint(&key, &scalars_bigint) } } @@ -260,10 +237,10 @@ where let (row_coms, com_rands): (Vec<_>, Vec<_>) = cfg_iter!(m) .map(|row| { #[cfg(not(feature = "parallel"))] - let (c, r) = Self::pedersen_commit(ck, row, None, Some(rng_inner)); + let r = G::ScalarField::rand(rng_inner); #[cfg(feature = "parallel")] - let (c, r) = - Self::pedersen_commit(ck, row, None, Some(&mut rand::thread_rng())); + let r = G::ScalarField::rand(&mut rand::thread_rng()); + let c = (Self::pedersen_commit(&ck.com_key, row) + ck.h * r).into(); (c, r) }) .unzip(); @@ -360,10 +337,10 @@ where } // Absorbing public parameters - sponge.absorb(&to_bytes!(ck).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?); // Absorbing the commitment to the polynomial - sponge.absorb(&to_bytes!(&com.row_coms).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(com.row_coms).map_err(|_| Error::TranscriptError)?); // Absorbing the point sponge.absorb(point); @@ -383,7 +360,10 @@ where let eval = inner_product(&lt, &r); // Singleton commit - let (com_eval, r_eval) = Self::pedersen_commit(ck, &[eval], None, Some(rng_inner)); + let (com_eval, r_eval) = { + let r = G::ScalarField::rand(rng_inner); + ((ck.com_key[0] * eval + ck.h * r).into(), r) + }; // ******** Dot product argument ******** // Appendix A.2 in the reference article @@ -394,17 +374,19 @@ where let b = inner_product(&r, &d); // Multi-commit - let (com_d, r_d) = Self::pedersen_commit(ck, &d, None, Some(rng_inner)); + let r_d = G::ScalarField::rand(rng_inner); + let com_d = (Self::pedersen_commit(&ck.com_key, &d) + ck.h * r_d).into(); // Singleton commit - let (com_b, r_b) = Self::pedersen_commit(ck, &[b], None, Some(rng_inner)); + let r_b = G::ScalarField::rand(rng_inner); + let com_b = (ck.com_key[0] * b + ck.h * r_b).into(); // Absorbing the commitment to the evaluation - sponge.absorb(&to_bytes!(&com_eval).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(com_eval).map_err(|_| Error::TranscriptError)?); // Absorbing the two auxiliary commitments - sponge.absorb(&to_bytes!(&com_d).map_err(|_| Error::TranscriptError)?); - sponge.absorb(&to_bytes!(&com_b).map_err(|_|
Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(com_d).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(com_b).map_err(|_| Error::TranscriptError)?); // Receive the random challenge c from the verifier, i.e. squeeze // it from the transcript. @@ -493,36 +475,36 @@ where let l_bigint = cfg_iter!(l) .map(|chi| chi.into_bigint()) .collect::>(); - let t_prime: G = ::msm_bigint(row_coms, &l_bigint).into(); + let t_prime: G = ::msm_bigint(&row_coms, &l_bigint).into(); // Absorbing public parameters - sponge.absorb(&to_bytes!(vk).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?); // Absorbing the commitment to the polynomial - sponge.absorb(&to_bytes!(row_coms).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(*row_coms).map_err(|_| Error::TranscriptError)?); // Absorbing the point sponge.absorb(point); // Absorbing the commitment to the evaluation - sponge.absorb(&to_bytes!(com_eval).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(*com_eval).map_err(|_| Error::TranscriptError)?); // Absorbing the two auxiliary commitments - sponge.absorb(&to_bytes!(com_d).map_err(|_| Error::TranscriptError)?); - sponge.absorb(&to_bytes!(com_b).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(*com_d).map_err(|_| Error::TranscriptError)?); + sponge.absorb(&serialize_to_vec!(*com_b).map_err(|_| Error::TranscriptError)?); // Receive the random challenge c from the verifier, i.e. squeeze // it from the transcript. let c: G::ScalarField = sponge.squeeze_field_elements(1)[0]; // First check - let com_z_zd = Self::pedersen_commit(vk, z, Some(*z_d), None).0; + let com_z_zd = (Self::pedersen_commit(&vk.com_key, z) + vk.h * z_d).into(); if com_z_zd != (t_prime.mul(c) + com_d).into() { return Ok(false); } // Second check - let com_dp = Self::pedersen_commit(vk, &[inner_product(&r, z)], Some(*z_b), None).0; + let com_dp = (vk.com_key[0] * inner_product(&r, z) + vk.h * z_b).into(); if com_dp != (com_eval.mul(c) + com_b).into() { return Ok(false); } diff --git a/poly-commit/src/hyrax/utils.rs b/poly-commit/src/hyrax/utils.rs index 69642e44..74879a9e 100644 --- a/poly-commit/src/hyrax/utils.rs +++ b/poly-commit/src/hyrax/utils.rs @@ -4,10 +4,10 @@ use ark_std::vec::Vec; #[cfg(feature = "parallel")] use rayon::prelude::*; -/// Transforms a flat vector into a matrix in column-major order. The latter is -/// given as a list of rows. +/// Transforms a flat vector into a n*m matrix in column-major order. The +/// latter is given as a list of rows. /// -/// For example, if flat = [1, 2, 3, 4, 5, 6] and n = 2, m = 3, then +/// For example, if flat = [1, 2, 3, 4, 5, 6] and n = 3, m = 2, then /// the output is [[1, 3, 5], [2, 4, 6]]. pub(crate) fn flat_to_matrix_column_major(flat: &[T], n: usize, m: usize) -> Vec> { assert_eq!(flat.len(), n * m, "n * m should coincide with flat.len()"); diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index f06ebf96..fcb62ad9 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -4,22 +4,10 @@ use ark_std::vec::Vec; #[cfg(feature = "parallel")] use rayon::{ - iter::{IntoParallelRefIterator, ParallelIterator}, + iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}, prelude::IndexedParallelIterator, }; -/// Takes as input a struct, and converts them to a series of bytes. 
All traits -/// that implement `CanonicalSerialize` can be automatically converted to bytes -/// in this manner. -/// From jellyfish lib -#[macro_export] -macro_rules! to_bytes { - ($x:expr) => {{ - let mut buf = ark_std::vec![]; - ark_serialize::CanonicalSerialize::serialize_compressed($x, &mut buf).map(|_| buf) - }}; -} - /// Return ceil(x / y). pub(crate) fn ceil_div(x: usize, y: usize) -> usize { // XXX. warning: this expression can overflow. @@ -70,11 +58,11 @@ impl Matrix { self.n ); - (0..self.m) + cfg_into_iter!(0..self.m) .map(|col| { inner_product( v, - &(0..self.n) + &cfg_into_iter!(0..self.m) .map(|row| self.entries[row][col]) .collect::>(), ) From ac4a14c5e7a3486d845229e0ef529ed64457648e Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Tue, 23 Jan 2024 01:11:39 +0100 Subject: [PATCH 50/75] Hash and absorb --- poly-commit/src/hyrax/mod.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 2bf5923d..21e66cb6 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -337,7 +337,10 @@ where } // Absorbing public parameters - sponge.absorb(&serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?); + sponge.absorb( + &Blake2s256::digest(serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?) + .as_slice(), + ); // Absorbing the commitment to the polynomial sponge.absorb(&serialize_to_vec!(com.row_coms).map_err(|_| Error::TranscriptError)?); @@ -478,7 +481,10 @@ where let t_prime: G = ::msm_bigint(&row_coms, &l_bigint).into(); // Absorbing public parameters - sponge.absorb(&serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?); + sponge.absorb( + &Blake2s256::digest(serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?) 
+ .as_slice(), + ); // Absorbing the commitment to the polynomial sponge.absorb(&serialize_to_vec!(*row_coms).map_err(|_| Error::TranscriptError)?); From d6a709e2c606d3c0e4624e75f1a0e08c1b27a14f Mon Sep 17 00:00:00 2001 From: mmagician Date: Fri, 26 Jan 2024 13:30:06 +0100 Subject: [PATCH 51/75] BrakedownPCSParams need to be exported publicly --- poly-commit/src/linear_codes/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index d0b8f90b..30b524c0 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -29,6 +29,7 @@ pub use multilinear_brakedown::MultilinearBrakedown; mod brakedown; mod data_structures; +pub use data_structures::BrakedownPCParams; use data_structures::*; pub use data_structures::LinCodePCProof; From ddb1cc54fb2308174741b9c7d0588de64ee11b25 Mon Sep 17 00:00:00 2001 From: Marcin Date: Wed, 31 Jan 2024 11:21:10 +0100 Subject: [PATCH 52/75] only enable num-traits on aarch (#58) --- poly-commit/src/linear_codes/brakedown.rs | 2 +- poly-commit/src/linear_codes/utils.rs | 2 +- poly-commit/src/utils.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/poly-commit/src/linear_codes/brakedown.rs b/poly-commit/src/linear_codes/brakedown.rs index 6afc7c8d..722e1338 100644 --- a/poly-commit/src/linear_codes/brakedown.rs +++ b/poly-commit/src/linear_codes/brakedown.rs @@ -12,7 +12,7 @@ use ark_ff::PrimeField; use ark_std::log2; use ark_std::rand::RngCore; use ark_std::vec::Vec; -#[cfg(not(feature = "std"))] +#[cfg(all(not(feature = "std"), target_arch = "aarch64"))] use num_traits::Float; impl PCUniversalParams for BrakedownPCParams diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index df2dc1cc..292bd066 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -5,7 +5,7 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::string::ToString; use ark_std::vec::Vec; -#[cfg(not(feature = "std"))] +#[cfg(all(not(feature = "std"), target_arch = "aarch64"))] use num_traits::Float; #[cfg(test)] diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index 3b2a336f..a26cc0c7 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -1,4 +1,4 @@ -#[cfg(not(feature = "std"))] +#[cfg(all(not(feature = "std"), target_arch = "aarch64"))] use num_traits::Float; #[cfg(feature = "parallel")] From 265e261101f0b03e2a4a1ba99948d71863f9d6e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 3 Jun 2024 14:49:06 +0200 Subject: [PATCH 53/75] added Sync trait bound Co-authored-by: Cesar Descalzo --- poly-commit/src/linear_codes/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index b31595c5..ea5827e8 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -85,7 +85,8 @@ where type LinCodePCParams: PCUniversalParams + PCCommitterKey + PCVerifierKey - + LinCodeParametersInfo; + + LinCodeParametersInfo + + Sync; /// Does a default setup for the PCS. 
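(The new `Sync` bound is what lets a shared reference to the parameters cross into rayon's worker threads during column hashing. A minimal illustration of the constraint — not crate code:)

#[cfg(feature = "parallel")]
use rayon::prelude::*;

// Capturing &P inside a rayon closure compiles only if P: Sync, which is why
// the associated parameter type now carries the bound.
fn hash_rows_in_parallel<P: Sync>(
    params: &P,
    rows: &[Vec<u8>],
    hash: impl Fn(&P, &[u8]) + Send + Sync,
) {
    #[cfg(feature = "parallel")]
    rows.par_iter().for_each(|row| hash(params, row));
    #[cfg(not(feature = "parallel"))]
    rows.iter().for_each(|row| hash(params, row));
}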
fn setup( From a5cf4cfd80477bd29e725a14fcc9a3ab9b9c01a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 3 Jun 2024 15:53:50 +0200 Subject: [PATCH 54/75] removed TODO --- poly-commit/src/linear_codes/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index ea5827e8..e52feaa5 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -413,7 +413,6 @@ where }; // 1. Seed the transcript with the point and the received vector - // TODO Consider removing the evaluation point from the transcript. let point_vec = L::point_to_vec(point.clone()); sponge.absorb(&point_vec); sponge.absorb(&proof.opening.v); From 8d356dbc24bc47201133a6c09e4f09d007dfb0ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 3 Jun 2024 16:13:46 +0200 Subject: [PATCH 55/75] Fixed error whereby boolean value returned by path.verify was neglected Co-authored-by: Cesar Descalzo Co-authored-by: mmagician --- poly-commit/src/linear_codes/mod.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index e52feaa5..0fc60b9d 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -442,8 +442,12 @@ where return Err(Error::InvalidCommitment); } - path.verify(leaf_hash_param, two_to_one_hash_param, root, leaf.clone()) - .map_err(|_| Error::InvalidCommitment)?; + if !path + .verify(leaf_hash_param, two_to_one_hash_param, root, leaf.clone()) + .map_err(|_| Error::InvalidCommitment)? + { + return Ok(false); + } } // Helper closure: checks if a.b = c. From 61d9a7612481c9837793ad656ddc811362efaf5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Mon, 3 Jun 2024 16:19:40 +0200 Subject: [PATCH 56/75] removed unnecessary qualification which linter didn't like --- poly-commit/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index bcd625a7..5dec236b 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -273,7 +273,7 @@ pub trait PolynomialCommitment, S: Cryptographic // order to gather (i.e. batch) all polynomials that should be queried at // the same point, then opening their commitments simultaneously with a // single call to `open` (per point) - let rng = &mut crate::optional_rng::OptionalRng(rng); + let rng = &mut optional_rng::OptionalRng(rng); let poly_st_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() .zip(states) From 6c5e09607a28ff1e99203177a2950c267b52b8f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Tue, 4 Jun 2024 11:27:19 +0200 Subject: [PATCH 57/75] changed potential panic to returning Err, stopping early Co-authored-by: Cesar Descalzo --- poly-commit/src/linear_codes/mod.rs | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 0fc60b9d..eecac5a8 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -421,17 +421,14 @@ where let indices = get_indices_from_sponge(n_ext_cols, t, sponge)?; // 3. Hash the received columns into leaf hashes.
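(The loop introduced below replaces an `unwrap()` inside `map`; an equivalent iterator shape that also short-circuits on the first hashing failure — a sketch assuming the surrounding generics, in particular `H::Output: Into<C::Leaf>` — would be:)

let col_hashes: Vec<C::Leaf> = proof
    .opening
    .columns
    .iter()
    .map(|c| {
        H::evaluate(vk.col_hash_params(), c.clone())
            .map(Into::into)
            .map_err(|_| Error::HashingError)
    })
    .collect::<Result<_, _>>()?; // first Err aborts the whole collect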
- let col_hashes: Vec = proof - .opening - .columns - .iter() - .map(|c| { - H::evaluate(vk.col_hash_params(), c.clone()) - .map_err(|_| Error::HashingError) - .unwrap() - .into() - }) - .collect(); + let mut col_hashes: Vec = Vec::new(); + + for c in proof.opening.columns.iter() { + match H::evaluate(vk.col_hash_params(), c.clone()) { + Ok(a) => col_hashes.push(a.into()), + Err(_) => return Err(Error::HashingError), + } + } // 4. Verify the paths for each of the leaf hashes - this is only run once, // even if we have a well-formedness check (i.e., we save sending and checking the columns). From 18a3d847719485505e1e7d43bdf6e146711c62dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Tue, 4 Jun 2024 11:34:07 +0200 Subject: [PATCH 58/75] removed unnecessary function defined inside check() Co-authored-by: Cesar Descalzo --- poly-commit/src/linear_codes/mod.rs | 38 ++++++++++------------------- 1 file changed, 13 insertions(+), 25 deletions(-) diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index eecac5a8..7859c1af 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -422,7 +422,7 @@ where // 3. Hash the received columns into leaf hashes. let mut col_hashes: Vec = Vec::new(); - + for c in proof.opening.columns.iter() { match H::evaluate(vk.col_hash_params(), c.clone()) { Ok(a) => col_hashes.push(a.into()), @@ -447,15 +447,6 @@ where } } - // Helper closure: checks if a.b = c. - let check_inner_product = |a, b, c| -> Result<(), Error> { - if inner_product(a, b) != c { - return Err(Error::InvalidCommitment); - } - - Ok(()) - }; - // 5. Compute the encoding w = E(v). let w = L::encode(&proof.opening.v, vk); @@ -468,24 +459,21 @@ where if let (Some(well_formedness), Some(r)) = out { let w_well_formedness = L::encode(well_formedness, vk); for (transcript_index, matrix_index) in indices.iter().enumerate() { - check_inner_product( - &r, - &proof.opening.columns[transcript_index], - w_well_formedness[*matrix_index], - )?; - check_inner_product( - &b, - &proof.opening.columns[transcript_index], - w[*matrix_index], - )?; + if inner_product(&r, &proof.opening.columns[transcript_index]) + != w_well_formedness[*matrix_index] + || inner_product(&b, &proof.opening.columns[transcript_index]) + != w[*matrix_index] + { + return Err(Error::InvalidCommitment); + } } } else { for (transcript_index, matrix_index) in indices.iter().enumerate() { - check_inner_product( - &b, - &proof.opening.columns[transcript_index], - w[*matrix_index], - )?; + if inner_product(&b, &proof.opening.columns[transcript_index]) + != w[*matrix_index] + { + return Err(Error::InvalidCommitment); + } } } From f3495d0c70bf6bf7e2a61b83e4050e74fc2e1ed2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Mej=C3=ADas=20Gil?= Date: Thu, 6 Jun 2024 08:39:07 +0200 Subject: [PATCH 59/75] various minor fixes --- poly-commit/src/hyrax/data_structures.rs | 6 ++---- poly-commit/src/lib.rs | 2 +- poly-commit/src/optional_rng.rs | 8 +++----- poly-commit/src/utils.rs | 2 +- 4 files changed, 7 insertions(+), 11 deletions(-) diff --git a/poly-commit/src/hyrax/data_structures.rs b/poly-commit/src/hyrax/data_structures.rs index aa58b7cf..80d99ff5 100644 --- a/poly-commit/src/hyrax/data_structures.rs +++ b/poly-commit/src/hyrax/data_structures.rs @@ -80,10 +80,8 @@ impl PCCommitment for HyraxCommitment { pub(crate) type HyraxRandomness = Vec; -/// Hyrax Commitment State blah blah blah blah -/// blah blah blah blah -/// blah blah blah blah -/// blah blah 
blah blah +/// Hyrax Commitment state: matrix of polynomial coefficients and list of random +/// scalars used in each of the row-wise Pedersen commitments #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub struct HyraxCommitmentState diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index 0e1587ee..a2968c9b 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -278,7 +278,7 @@ pub trait PolynomialCommitment, S: Cryptographic // order to gather (i.e. batch) all polynomials that should be queried at // the same point, then opening their commitments simultaneously with a // single call to `open` (per point) - let rng = &mut crate::optional_rng::OptionalRng(rng); + let rng = &mut optional_rng::OptionalRng(rng); let poly_st_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() .zip(states) diff --git a/poly-commit/src/optional_rng.rs b/poly-commit/src/optional_rng.rs index 44c4fe1c..48d1bf0c 100644 --- a/poly-commit/src/optional_rng.rs +++ b/poly-commit/src/optional_rng.rs @@ -1,4 +1,4 @@ -use ark_std::rand::RngCore; +use ark_std::rand::{Error, RngCore}; use core::num::NonZeroU32; /// `OptionalRng` is a hack that is necessary because `Option<&mut R>` is not implicitly reborrowed @@ -35,12 +35,10 @@ impl RngCore for OptionalRng { } #[inline] - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { + fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { match &mut self.0 { Some(r) => r.try_fill_bytes(dest), - None => Err(NonZeroU32::new(ark_std::rand::Error::CUSTOM_START) - .unwrap() - .into()), + None => Err(NonZeroU32::new(Error::CUSTOM_START).unwrap().into()), } } } diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index fcb62ad9..85986cbe 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -62,7 +62,7 @@ impl Matrix { .map(|col| { inner_product( v, - &cfg_into_iter!(0..self.m) + &cfg_into_iter!(0..self.n) .map(|row| self.entries[row][col]) .collect::>(), ) From f66901b08465d254bd725665a5e33044447eb229 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Sun, 9 Jun 2024 19:36:01 +0200 Subject: [PATCH 60/75] Add `ark-std` to patch --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index 617fd8a1..0532ade1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } +ark-std = { git = "https://github.com/arkworks-rs/std/" } ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } From 103669f940c5dad71556acbae6680f3b9e4a1af3 Mon Sep 17 00:00:00 2001 From: Cesar199999 Date: Mon, 10 Jun 2024 10:58:57 +0200 Subject: [PATCH 61/75] Reorder Hyrax checks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Antonio Mejías Gil --- poly-commit/src/hyrax/mod.rs | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 21e66cb6..5a47e6ca 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -474,12 +474,6 @@ where }); } - // Computing t_prime with a 
multi-exponentiation - let l_bigint = cfg_iter!(l) - .map(|chi| chi.into_bigint()) - .collect::>(); - let t_prime: G = ::msm_bigint(&row_coms, &l_bigint).into(); - // Absorbing public parameters sponge.absorb( &Blake2s256::digest(serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?) @@ -503,15 +497,22 @@ where // it from the transcript. let c: G::ScalarField = sponge.squeeze_field_elements(1)[0]; - // First check - let com_z_zd = (Self::pedersen_commit(&vk.com_key, z) + vk.h * z_d).into(); - if com_z_zd != (t_prime.mul(c) + com_d).into() { + // Second check from the paper (figure 6, equation (14)) + // Moved here for potential early return + let com_dp = (vk.com_key[0] * inner_product(&r, z) + vk.h * z_b).into(); + if com_dp != (com_eval.mul(c) + com_b).into() { return Ok(false); } - // Second check - let com_dp = (vk.com_key[0] * inner_product(&r, z) + vk.h * z_b).into(); - if com_dp != (com_eval.mul(c) + com_b).into() { + // Computing t_prime with a multi-exponentiation + let l_bigint = cfg_iter!(l) + .map(|chi| chi.into_bigint()) + .collect::>(); + let t_prime: G = ::msm_bigint(&row_coms, &l_bigint).into(); + + // First check from the paper (figure 6, equation (13)) + let com_z_zd = (Self::pedersen_commit(&vk.com_key, z) + vk.h * z_d).into(); + if com_z_zd != (t_prime.mul(c) + com_d).into() { return Ok(false); } } From eee8e0bdf046078d45d66df0300ee08559d85a80 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Sun, 9 Jun 2024 19:36:01 +0200 Subject: [PATCH 62/75] Add `ark-std` to patch --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index bde57bf6..1de0e61b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } +ark-std = { git = "https://github.com/arkworks-rs/std/" } ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } From 65ef67c16c93412a498f56349721caa5d4295954 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Mon, 21 Oct 2024 17:53:57 +0200 Subject: [PATCH 63/75] Downgrade `hashbrown` --- poly-commit/Cargo.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index c7a28579..360055c7 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -15,15 +15,15 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge", "merkle_tree"] } ark-std = { version = "^0.4.0", default-features = false } -blake2 = { version = "0.10", default-features = false } -rand = { version = "0.8.0", optional = true } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } +blake2 = { version = "0.10", default-features = false } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } +hashbrown = { version = "0.13", default-features = false, optional = true } +rand = { version = "0.8.0", optional = true } rayon = { version = "1", optional = true } -hashbrown = { version = "0.14", default-features = false, optional = true } [[bench]] name = "ipa_times" 
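A note on the "Reorder Hyrax checks" change above: the verifier's two
equations have very different costs, so checking the cheap scalar-product
equation (14) first lets an invalid proof be rejected before any multi-scalar
multiplication runs. The sketch below is illustrative only -- the function
name and the exact operation counts are assumptions, not part of the patch
set -- but it makes the asymmetry concrete.

    /// Rough group-operation counts for the two Hyrax verifier checks, for a
    /// polynomial in `num_vars` variables whose coefficient matrix has
    /// 2^(num_vars / 2) rows and columns. Returns (eq_14_cost, eq_13_cost).
    fn hyrax_check_costs(num_vars: u32) -> (u64, u64) {
        let side = 1u64 << (num_vars / 2);
        // Equation (14): two fixed-base scalar multiplications, one scalar
        // multiplication of the evaluation commitment, and two additions.
        let eq_14 = 5;
        // Equation (13): a Pedersen commitment to z (an MSM of size `side`)
        // plus the multi-exponentiation <l, row_coms> (another MSM of size
        // `side`), then a few scalar multiplications and additions.
        let eq_13 = 2 * side + 5;
        (eq_14, eq_13)
    }

For 20 variables this is 5 group operations against roughly 2 * 2^10, which
is why equation (14) moved ahead of the MSM in the verifier.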
From 67ddd9c16d2cd7aa9a21888f2ca830ceb7da334c Mon Sep 17 00:00:00 2001 From: Cesar Descalzo Date: Fri, 28 Jun 2024 10:31:38 +0200 Subject: [PATCH 64/75] Fix breaking change from algebra/poly (#72) --- poly-commit/src/streaming_kzg/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poly-commit/src/streaming_kzg/mod.rs b/poly-commit/src/streaming_kzg/mod.rs index 8fd494e2..d96bbaa4 100644 --- a/poly-commit/src/streaming_kzg/mod.rs +++ b/poly-commit/src/streaming_kzg/mod.rs @@ -269,7 +269,7 @@ fn interpolate_poly( ) -> DensePolynomial { let mut res = DensePolynomial::from_coefficients_vec(vec![E::ScalarField::zero()]); for (j, (_x_j, y_j)) in eval_points.iter().zip(evals.iter()).enumerate() { - let l_poly = lang[j].mul(sca_inverse[j] * y_j); + let l_poly = (&lang[j]).mul(sca_inverse[j] * y_j); res = (&res).add(&l_poly); } res From 175a61052bde244c3a645fca502984f98e768f49 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Mon, 21 Oct 2024 19:00:13 +0200 Subject: [PATCH 65/75] Reorder deps --- poly-commit/Cargo.toml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 360055c7..eccd68da 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -15,12 +15,14 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge", "merkle_tree"] } ark-std = { version = "^0.4.0", default-features = false } -ark-relations = { version = "^0.4.0", default-features = false, optional = true } -ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } blake2 = { version = "0.10", default-features = false } -digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } +digest = "0.10" + +ark-relations = { version = "^0.4.0", default-features = false, optional = true } +ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } + hashbrown = { version = "0.13", default-features = false, optional = true } rand = { version = "0.8.0", optional = true } rayon = { version = "1", optional = true } From d3e3808c0a311f7ad74707b08ff9a528ee45e892 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Mon, 21 Oct 2024 16:03:36 +0200 Subject: [PATCH 66/75] Add dummy doc for nightly --- poly-commit/src/lib.rs | 15 +++++++++++++++ poly-commit/src/streaming_kzg/mod.rs | 1 + 2 files changed, 16 insertions(+) diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index a2968c9b..c3122638 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -633,6 +633,7 @@ fn lc_query_set_to_poly_query_set<'a, F: Field, T: Clone + Ord>( poly_query_set } +/// Dummy doc #[cfg(test)] pub mod tests { use crate::*; @@ -659,6 +660,7 @@ pub mod tests { sponge: fn() -> S, } + /// Dummy doc pub fn bad_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -752,6 +754,7 @@ pub mod tests { Ok(()) } + /// Dummy doc fn test_template(info: TestInfo) -> Result<(), PC::Error> where F: PrimeField, @@ -893,6 +896,7 @@ pub mod tests { Ok(()) } + /// Dummy doc fn equation_test_template(info: TestInfo) -> Result<(), PC::Error> where F: PrimeField, @@ -1074,6 +1078,7 @@ pub mod tests { Ok(()) } + /// Dummy doc pub fn single_poly_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1102,6 +1107,7 @@ pub mod tests { test_template::(info) 
} + /// Dummy doc pub fn linear_poly_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1129,6 +1135,7 @@ pub mod tests { test_template::(info) } + /// Dummy doc pub fn single_poly_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1156,6 +1163,7 @@ pub mod tests { test_template::(info) } + /// Dummy doc pub fn quadratic_poly_degree_bound_multiple_queries_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1183,6 +1191,7 @@ pub mod tests { test_template::(info) } + /// Dummy doc pub fn single_poly_degree_bound_multiple_queries_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1210,6 +1219,7 @@ pub mod tests { test_template::(info) } + /// Dummy doc pub fn two_polys_degree_bound_single_query_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1237,6 +1247,7 @@ pub mod tests { test_template::(info) } + /// Dummy doc pub fn full_end_to_end_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1265,6 +1276,7 @@ pub mod tests { test_template::(info) } + /// Dummy doc pub fn full_end_to_end_equation_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1293,6 +1305,7 @@ pub mod tests { equation_test_template::(info) } + /// Dummy doc pub fn single_equation_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1321,6 +1334,7 @@ pub mod tests { equation_test_template::(info) } + /// Dummy doc pub fn two_equation_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1349,6 +1363,7 @@ pub mod tests { equation_test_template::(info) } + /// Dummy doc pub fn two_equation_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, diff --git a/poly-commit/src/streaming_kzg/mod.rs b/poly-commit/src/streaming_kzg/mod.rs index d96bbaa4..f9a3d5f9 100644 --- a/poly-commit/src/streaming_kzg/mod.rs +++ b/poly-commit/src/streaming_kzg/mod.rs @@ -93,6 +93,7 @@ pub use data_structures::*; pub use space::CommitterKeyStream; pub use time::CommitterKey; +/// Dummy docs #[cfg(test)] pub mod tests; From 0858433c658cf78d2f62d1c25bc0ce821100f069 Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Thu, 24 Oct 2024 21:33:59 +0200 Subject: [PATCH 67/75] Fix `hashbrown` + Replace Blake2 by Blake3 --- poly-commit/Cargo.toml | 9 ++++++++- poly-commit/src/constraints.rs | 34 +++++++++++++++++++++++++++++++--- poly-commit/src/hyrax/mod.rs | 21 ++++++--------------- 3 files changed, 45 insertions(+), 19 deletions(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index eccd68da..1412fb71 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -17,13 +17,14 @@ ark-crypto-primitives = {version = "^0.4.0", default-features = false, features ark-std = { version = "^0.4.0", default-features = false } blake2 = { version = "0.10", default-features = false } +blake3 = { version = "1.5.4", default-features = false } derivative = { version = "2", features = [ "use_core" ] } digest = "0.10" ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.13", 
default-features = false, optional = true } +hashbrown = { version = "0.15", default-features = false, features = ["inline-more", "allocator-api2"], optional = true } rand = { version = "0.8.0", optional = true } rayon = { version = "1", optional = true } @@ -42,6 +43,12 @@ name = "size" path = "benches/size.rs" harness = false +[target.'cfg(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr"))'.dependencies] +ahash = { version = "0.8", default-features = false} + +[target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies] +fnv = { version = "1.0", default-features = false } + [dev-dependencies] ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index 1300509a..12962aec 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -8,9 +8,33 @@ use ark_poly::Polynomial; use ark_r1cs_std::fields::emulated_fp::EmulatedFpVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; -use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; +use ark_std::{ + borrow::Borrow, + cmp::Eq, + cmp::PartialEq, + hash::{BuildHasherDefault, Hash}, + marker::Sized, +}; use hashbrown::{HashMap, HashSet}; +#[cfg(all( + target_has_atomic = "8", + target_has_atomic = "16", + target_has_atomic = "32", + target_has_atomic = "64", + target_has_atomic = "ptr" +))] +type DefaultHasher = ahash::AHasher; + +#[cfg(not(all( + target_has_atomic = "8", + target_has_atomic = "16", + target_has_atomic = "32", + target_has_atomic = "64", + target_has_atomic = "ptr" +)))] +type DefaultHasher = fnv::FnvHasher; + /// Define the minimal interface of prepared allocated structures. pub trait PrepareGadget: Sized { /// Prepare from an unprepared element. @@ -178,13 +202,17 @@ pub struct LabeledPointVar { /// An allocated version of `QuerySet`. #[derive(Clone)] pub struct QuerySetVar( - pub HashSet<(String, LabeledPointVar)>, + pub HashSet<(String, LabeledPointVar), DefaultHasher>, ); /// An allocated version of `Evaluations`. 
#[derive(Clone)] pub struct EvaluationsVar( - pub HashMap, EmulatedFpVar>, + pub HashMap< + LabeledPointVar, + EmulatedFpVar, + BuildHasherDefault, + >, ); impl EvaluationsVar { diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 5a47e6ca..85a3a833 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -10,8 +10,6 @@ use ark_ff::PrimeField; use ark_poly::MultilinearExtension; use ark_serialize::serialize_to_vec; use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand}; -use blake2::Blake2s256; -use digest::Digest; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -146,16 +144,15 @@ where // generators, since the point at infinity should theoretically occur) let points: Vec<_> = ark_std::cfg_into_iter!(0u64..dim + 1) .map(|i| { - let mut hash = - Blake2s256::digest([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice()); - let mut p = G::from_random_bytes(&hash); + let hash = blake3::hash([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice()); + let mut p = G::from_random_bytes(hash.as_bytes()); let mut j = 0u64; while p.is_none() { let mut bytes = PROTOCOL_NAME.to_vec(); bytes.extend(i.to_le_bytes()); bytes.extend(j.to_le_bytes()); - hash = Blake2s256::digest(bytes.as_slice()); - p = G::from_random_bytes(&hash); + let hash = blake3::hash(bytes.as_slice()); + p = G::from_random_bytes(hash.as_bytes()); j += 1; } let point = p.unwrap(); @@ -337,10 +334,7 @@ where } // Absorbing public parameters - sponge.absorb( - &Blake2s256::digest(serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?) - .as_slice(), - ); + sponge.absorb(&serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?); // Absorbing the commitment to the polynomial sponge.absorb(&serialize_to_vec!(com.row_coms).map_err(|_| Error::TranscriptError)?); @@ -475,10 +469,7 @@ where } // Absorbing public parameters - sponge.absorb( - &Blake2s256::digest(serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?) - .as_slice(), - ); + sponge.absorb(&serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?); // Absorbing the commitment to the polynomial sponge.absorb(&serialize_to_vec!(*row_coms).map_err(|_| Error::TranscriptError)?); From c2ba181f0c2225806fc47fc1c8050bb767bf470d Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Fri, 25 Oct 2024 11:16:11 +0200 Subject: [PATCH 68/75] Revert to Blake2 --- poly-commit/Cargo.toml | 1 - poly-commit/src/constraints.rs | 5 ++++- poly-commit/src/hyrax/mod.rs | 12 ++++++++---- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 1412fb71..65bd8ead 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -17,7 +17,6 @@ ark-crypto-primitives = {version = "^0.4.0", default-features = false, features ark-std = { version = "^0.4.0", default-features = false } blake2 = { version = "0.10", default-features = false } -blake3 = { version = "1.5.4", default-features = false } derivative = { version = "2", features = [ "use_core" ] } digest = "0.10" diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index 12962aec..997296dd 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -202,7 +202,10 @@ pub struct LabeledPointVar { /// An allocated version of `QuerySet`. #[derive(Clone)] pub struct QuerySetVar( - pub HashSet<(String, LabeledPointVar), DefaultHasher>, + pub HashSet< + (String, LabeledPointVar), + BuildHasherDefault, + >, ); /// An allocated version of `Evaluations`. 
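The `BuildHasherDefault` rewrite above is what makes hashbrown 0.15 usable
here: built with default-features = false, the crate no longer supplies a
default hash builder, so the third type parameter of `HashMap`/`HashSet` must
be provided by the caller. A minimal sketch of the pattern, assuming the same
`ahash`/`fnv` fallback as the cfg blocks earlier in this patch (the map keys
and values are placeholders):

    use core::hash::BuildHasherDefault;
    use hashbrown::HashMap;

    // On targets with full atomic support the patch selects `ahash::AHasher`;
    // on the remaining targets it falls back to `fnv::FnvHasher`.
    type DefaultHasher = ahash::AHasher;

    fn new_eval_map() -> HashMap<&'static str, u64, BuildHasherDefault<DefaultHasher>> {
        // `with_hasher` sidesteps the `Default` impl that hashbrown only
        // offers when its built-in default hasher feature is enabled.
        HashMap::with_hasher(BuildHasherDefault::default())
    }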
diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs
index 85a3a833..27545e99 100644
--- a/poly-commit/src/hyrax/mod.rs
+++ b/poly-commit/src/hyrax/mod.rs
@@ -11,6 +11,9 @@ use ark_poly::MultilinearExtension;
 use ark_serialize::serialize_to_vec;
 use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand};

+use blake2::Blake2s256;
+use digest::Digest;
+
 #[cfg(feature = "parallel")]
 use rayon::prelude::*;

@@ -144,15 +147,16 @@ where
        // generators, since the point at infinity should theoretically occur)
        let points: Vec<_> = ark_std::cfg_into_iter!(0u64..dim + 1)
            .map(|i| {
-                let hash = blake3::hash([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice());
-                let mut p = G::from_random_bytes(hash.as_bytes());
+                let hash =
+                    Blake2s256::digest([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice());
+                let mut p = G::from_random_bytes(&hash);
                let mut j = 0u64;
                while p.is_none() {
                    let mut bytes = PROTOCOL_NAME.to_vec();
                    bytes.extend(i.to_le_bytes());
                    bytes.extend(j.to_le_bytes());
-                    let hash = blake3::hash(bytes.as_slice());
-                    p = G::from_random_bytes(hash.as_bytes());
+                    let hash = Blake2s256::digest(bytes.as_slice());
+                    p = G::from_random_bytes(&hash);
                    j += 1;
                }
                let point = p.unwrap();

From 9b03b6007c1efccafb2c5ff14351dc67ace294e4 Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Fri, 25 Oct 2024 12:12:18 +0200
Subject: [PATCH 69/75] Fix merging issues

---
 bench-templates/src/lib.rs            |  3 ---
 poly-commit/benches/hyrax_times.rs    |  3 +--
 poly-commit/src/constraints.rs        |  4 ++--
 poly-commit/src/hyrax/tests.rs        | 29 +++++++++++++--------------
 poly-commit/src/lib.rs                | 15 --------------
 poly-commit/src/multilinear_pc/mod.rs |  2 +-
 6 files changed, 18 insertions(+), 38 deletions(-)

diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs
index 2844dcb3..82c834c2 100644
--- a/bench-templates/src/lib.rs
+++ b/bench-templates/src/lib.rs
@@ -108,7 +108,6 @@ where
    F: PrimeField,
    P: Polynomial<F>,
    PCS: PolynomialCommitment<F, P>,
-    P::Point: UniformRand,
 {
    let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap();

@@ -138,7 +137,6 @@ where
    F: PrimeField,
    P: Polynomial<F>,
    PCS: PolynomialCommitment<F, P>,
-    P::Point: UniformRand,
 {
    let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap();

@@ -180,7 +178,6 @@ where
    F: PrimeField,
    P: Polynomial<F>,
    PCS: PolynomialCommitment<F, P>,
-    P::Point: UniformRand,
 {
    let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap();

diff --git a/poly-commit/benches/hyrax_times.rs b/poly-commit/benches/hyrax_times.rs
index c76753df..7f579cab 100644
--- a/poly-commit/benches/hyrax_times.rs
+++ b/poly-commit/benches/hyrax_times.rs
@@ -1,4 +1,3 @@
-use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
 use ark_pcs_bench_templates::*;
 use ark_poly::{DenseMultilinearExtension, MultilinearExtension};

@@ -9,7 +8,7 @@ use ark_poly_commit::hyrax::HyraxPC;
 use rand_chacha::ChaCha20Rng;

 // Hyrax PCS over BN254
-type Hyrax254 = HyraxPC<G1Affine, DenseMultilinearExtension<Fr>, PoseidonSponge<Fr>>;
+type Hyrax254 = HyraxPC<G1Affine, DenseMultilinearExtension<Fr>>;

 fn rand_poly_hyrax<F: PrimeField>(
    num_vars: usize,
diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs
index 04401e40..ae1a6c66 100644
--- a/poly-commit/src/constraints.rs
+++ b/poly-commit/src/constraints.rs
@@ -14,8 +14,8 @@ use ark_std::{
    cmp::{Eq, PartialEq},
    hash::{BuildHasherDefault, Hash},
 };
-// #[cfg(not(feature = "std"))]
-// use ark_std::{string::String, vec::Vec};
+#[cfg(not(feature = "std"))]
+use ark_std::{string::String, vec::Vec};
 use hashbrown::{HashMap, HashSet};

 #[cfg(all(
diff --git a/poly-commit/src/hyrax/tests.rs
b/poly-commit/src/hyrax/tests.rs index 713dd7f3..42fba46c 100644 --- a/poly-commit/src/hyrax/tests.rs +++ b/poly-commit/src/hyrax/tests.rs @@ -3,7 +3,6 @@ use crate::tests::*; use crate::utils::test_sponge; use crate::{LabeledPolynomial, PolynomialCommitment}; use ark_bls12_377::G1Affine; -use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_ec::AffineRepr; use ark_ed_on_bls12_381::EdwardsAffine; use ark_ff::PrimeField; @@ -17,10 +16,10 @@ use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; // ****************** types ****************** type Fq = ::ScalarField; -type Hyrax377 = HyraxPC, PoseidonSponge>; +type Hyrax377 = HyraxPC>; type Fr = ::ScalarField; -type Hyrax381 = HyraxPC, PoseidonSponge>; +type Hyrax381 = HyraxPC>; // ******** auxiliary test functions ******** @@ -110,14 +109,14 @@ fn hyrax_single_poly_test() { Some(10), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-377"); single_poly_test::<_, _, Hyrax381, _>( Some(10), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-381"); } @@ -128,14 +127,14 @@ fn hyrax_constant_poly_test() { Some(0), constant_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-377"); single_poly_test::<_, _, Hyrax381, _>( Some(0), constant_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-381"); } @@ -146,14 +145,14 @@ fn hyrax_full_end_to_end_test() { Some(8), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-377"); full_end_to_end_test::<_, _, Hyrax381, _>( Some(10), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-381"); } @@ -164,14 +163,14 @@ fn hyrax_single_equation_test() { Some(6), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-377"); single_equation_test::<_, _, Hyrax381, _>( Some(6), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-381"); } @@ -182,14 +181,14 @@ fn hyrax_two_equation_test() { Some(10), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-377"); two_equation_test::<_, _, Hyrax381, _>( Some(10), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-381"); } @@ -200,14 +199,14 @@ fn hyrax_full_end_to_end_equation_test() { Some(8), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-377"); full_end_to_end_equation_test::<_, _, Hyrax381, _>( Some(8), rand_poly, rand_point, - poseidon_sponge_for_test, + poseidon_sponge_for_test::, ) .expect("test failed for bls12-381"); } diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index d111cdca..5479051d 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -633,7 +633,6 @@ fn lc_query_set_to_poly_query_set<'a, F: Field, T: Clone + Ord>( poly_query_set } -/// Dummy doc #[cfg(test)] pub mod tests { #![allow(missing_docs)] @@ -660,7 +659,6 @@ pub mod tests { sponge: fn() -> S, } - /// Dummy doc pub fn bad_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -754,7 +752,6 @@ pub mod tests { Ok(()) } - 
/// Dummy doc fn test_template(info: TestInfo) -> Result<(), PC::Error> where F: PrimeField, @@ -896,7 +893,6 @@ pub mod tests { Ok(()) } - /// Dummy doc fn equation_test_template(info: TestInfo) -> Result<(), PC::Error> where F: PrimeField, @@ -1078,7 +1074,6 @@ pub mod tests { Ok(()) } - /// Dummy doc pub fn single_poly_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1107,7 +1102,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn linear_poly_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1135,7 +1129,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn single_poly_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1163,7 +1156,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn quadratic_poly_degree_bound_multiple_queries_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1191,7 +1183,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn single_poly_degree_bound_multiple_queries_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1219,7 +1210,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn two_polys_degree_bound_single_query_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, @@ -1247,7 +1237,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn full_end_to_end_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1276,7 +1265,6 @@ pub mod tests { test_template::(info) } - /// Dummy doc pub fn full_end_to_end_equation_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1305,7 +1293,6 @@ pub mod tests { equation_test_template::(info) } - /// Dummy doc pub fn single_equation_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1334,7 +1321,6 @@ pub mod tests { equation_test_template::(info) } - /// Dummy doc pub fn two_equation_test( num_vars: Option, rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, @@ -1363,7 +1349,6 @@ pub mod tests { equation_test_template::(info) } - /// Dummy doc pub fn two_equation_degree_bound_test( rand_poly: fn(usize, Option, &mut ChaCha20Rng) -> P, rand_point: fn(Option, &mut ChaCha20Rng) -> P::Point, diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index 08879797..ab3952a6 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -238,7 +238,7 @@ mod tests { use crate::multilinear_pc::{data_structures::UniversalParams, MultilinearPC}; use ark_bls12_381::Bls12_381; use ark_ec::pairing::Pairing; - use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension}; + use ark_poly::{DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension}; #[cfg(not(feature = "std"))] use ark_std::vec::Vec; use ark_std::{rand::RngCore, test_rng, UniformRand}; From f5924ee23f143e829a984ce46882345f2563adda Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Fri, 25 Oct 2024 12:17:35 +0200 Subject: [PATCH 70/75] Test if CI is happy --- poly-commit/src/constraints.rs | 4 ++-- poly-commit/src/data_structures.rs | 4 ++-- poly-commit/src/error.rs | 4 ++-- poly-commit/src/ipa_pc/mod.rs | 10 
+++++----- poly-commit/src/kzg10/mod.rs | 4 ++-- poly-commit/src/lib.rs | 10 +++++----- poly-commit/src/marlin/marlin_pc/data_structures.rs | 4 ++-- poly-commit/src/marlin/marlin_pc/mod.rs | 4 ++-- .../src/marlin/marlin_pst13_pc/combinations.rs | 4 ++-- .../src/marlin/marlin_pst13_pc/data_structures.rs | 4 ++-- poly-commit/src/marlin/marlin_pst13_pc/mod.rs | 8 ++++---- poly-commit/src/marlin/mod.rs | 10 +++++----- poly-commit/src/multilinear_pc/data_structures.rs | 4 ++-- poly-commit/src/multilinear_pc/mod.rs | 12 +++++++----- poly-commit/src/sonic_pc/data_structures.rs | 4 ++-- poly-commit/src/sonic_pc/mod.rs | 10 +++++----- poly-commit/src/streaming_kzg/mod.rs | 4 ++-- poly-commit/src/streaming_kzg/space.rs | 4 ++-- poly-commit/src/streaming_kzg/tests.rs | 4 ++-- poly-commit/src/streaming_kzg/time.rs | 4 ++-- 20 files changed, 59 insertions(+), 57 deletions(-) diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index ae1a6c66..04401e40 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -14,8 +14,8 @@ use ark_std::{ cmp::{Eq, PartialEq}, hash::{BuildHasherDefault, Hash}, }; -#[cfg(not(feature = "std"))] -use ark_std::{string::String, vec::Vec}; +// #[cfg(not(feature = "std"))] +// use ark_std::{string::String, vec::Vec}; use hashbrown::{HashMap, HashSet}; #[cfg(all( diff --git a/poly-commit/src/data_structures.rs b/poly-commit/src/data_structures.rs index cd822b98..608b2d8e 100644 --- a/poly-commit/src/data_structures.rs +++ b/poly-commit/src/data_structures.rs @@ -7,8 +7,8 @@ use ark_std::{ ops::{AddAssign, MulAssign, SubAssign}, rand::RngCore, }; -#[cfg(not(feature = "std"))] -use ark_std::{string::String, vec::Vec}; +// #[cfg(not(feature = "std"))] +// use ark_std::{string::String, vec::Vec}; /// Labels a `LabeledPolynomial` or a `LabeledCommitment`. pub type PolynomialLabel = String; diff --git a/poly-commit/src/error.rs b/poly-commit/src/error.rs index 15eee6a1..06582e32 100644 --- a/poly-commit/src/error.rs +++ b/poly-commit/src/error.rs @@ -1,5 +1,5 @@ -#[cfg(not(feature = "std"))] -use ark_std::string::String; +// #[cfg(not(feature = "std"))] +// use ark_std::string::String; /// The error type for `PolynomialCommitment`. 
#[derive(Debug)] diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index 8afbacd9..b280b66a 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -8,11 +8,11 @@ use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; use ark_serialize::CanonicalSerialize; use ark_std::{convert::TryInto, format, marker::PhantomData, ops::Mul, rand::RngCore}; -#[cfg(not(feature = "std"))] -use ark_std::{ - string::{String, ToString}, - vec::Vec, -}; +// #[cfg(not(feature = "std"))] +// use ark_std::{ +// string::{String, ToString}, +// vec::Vec, +// }; use digest::Digest; #[cfg(feature = "parallel")] use rayon::prelude::*; diff --git a/poly-commit/src/kzg10/mod.rs b/poly-commit/src/kzg10/mod.rs index 81a7b59f..da9ff524 100644 --- a/poly-commit/src/kzg10/mod.rs +++ b/poly-commit/src/kzg10/mod.rs @@ -10,8 +10,8 @@ use ark_ec::{pairing::Pairing, scalar_mul::ScalarMul, AffineRepr, CurveGroup, Va use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, rand::RngCore}; -#[cfg(not(feature = "std"))] -use ark_std::{string::ToString, vec::Vec}; +// #[cfg(not(feature = "std"))] +// use ark_std::{string::ToString, vec::Vec}; #[cfg(feature = "parallel")] use rayon::prelude::*; diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index 5479051d..65d39f42 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -26,11 +26,11 @@ use ark_std::{ iter::FromIterator, rand::RngCore, }; -#[cfg(not(feature = "std"))] -use ark_std::{ - string::{String, ToString}, - vec::Vec, -}; +// #[cfg(not(feature = "std"))] +// use ark_std::{ +// string::{String, ToString}, +// vec::Vec, +// }; /// Data structures used by a polynomial commitment scheme. pub mod data_structures; diff --git a/poly-commit/src/marlin/marlin_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pc/data_structures.rs index f01f67ab..4d866426 100644 --- a/poly-commit/src/marlin/marlin_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pc/data_structures.rs @@ -5,8 +5,8 @@ use crate::{ use ark_ec::{pairing::Pairing, AdditiveGroup}; use ark_ff::{Field, PrimeField, ToConstraintField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{ ops::{Add, AddAssign}, rand::RngCore, diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 52f56354..00ee9cf1 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -8,8 +8,8 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::Zero; use ark_poly::DenseUVPolynomial; use ark_std::{marker::PhantomData, ops::Div, rand::RngCore}; -#[cfg(not(feature = "std"))] -use ark_std::{string::ToString, vec::Vec}; +// #[cfg(not(feature = "std"))] +// use ark_std::{string::ToString, vec::Vec}; mod data_structures; pub use data_structures::*; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs b/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs index bc76ea99..b44e1ebe 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs @@ -1,7 +1,7 @@ //! Compute all combinations of values in a given list //! 
Credit: https://github.com/meltinglava/uniquecombinations/ -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; /// Compute all combinations of values in a given list. pub(crate) struct Combinations where diff --git a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs index 22377b9d..b836d9b1 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs @@ -7,8 +7,8 @@ use ark_poly::DenseMVPolynomial; use ark_serialize::{ CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{ io::{Read, Write}, marker::PhantomData, diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index b0f7114a..2e6b8725 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -13,8 +13,8 @@ use ark_ec::{ use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::{marker::PhantomData, ops::Index, ops::Mul, rand::RngCore}; -#[cfg(not(feature = "std"))] -use ark_std::{string::ToString, vec::Vec}; +// #[cfg(not(feature = "std"))] +// use ark_std::{string::ToString, vec::Vec}; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -702,8 +702,8 @@ mod tests { multivariate::{SparsePolynomial as SparsePoly, SparseTerm}, DenseMVPolynomial, }; - #[cfg(not(feature = "std"))] - use ark_std::vec::Vec; + // #[cfg(not(feature = "std"))] + // use ark_std::vec::Vec; use rand_chacha::ChaCha20Rng; type MVPoly_381 = SparsePoly<::ScalarField, SparseTerm>; diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index 714ad727..94dca7d1 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ -7,11 +7,11 @@ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::{One, Zero}; use ark_std::{convert::TryInto, hash::Hash, ops::AddAssign, ops::Mul}; -#[cfg(not(feature = "std"))] -use ark_std::{ - string::{String, ToString}, - vec::Vec, -}; +// #[cfg(not(feature = "std"))] +// use ark_std::{ +// string::{String, ToString}, +// vec::Vec, +// }; /// Polynomial commitment scheme from [[KZG10]][kzg] that enforces /// strict degree bounds and (optionally) enables hiding commitments by diff --git a/poly-commit/src/multilinear_pc/data_structures.rs b/poly-commit/src/multilinear_pc/data_structures.rs index c70aae08..f5909a69 100644 --- a/poly-commit/src/multilinear_pc/data_structures.rs +++ b/poly-commit/src/multilinear_pc/data_structures.rs @@ -1,7 +1,7 @@ use ark_ec::pairing::Pairing; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; #[allow(type_alias_bounds)] /// Evaluations over {0,1}^n for G1 pub type EvaluationHyperCubeOnG1 = Vec; diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index ab3952a6..97d1de5e 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -8,8 +8,8 @@ use ark_ec::{ }; use ark_ff::{Field, One, PrimeField, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; 
-#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{ collections::LinkedList, iter::FromIterator, marker::PhantomData, ops::Mul, rand::RngCore, UniformRand, @@ -238,9 +238,11 @@ mod tests { use crate::multilinear_pc::{data_structures::UniversalParams, MultilinearPC}; use ark_bls12_381::Bls12_381; use ark_ec::pairing::Pairing; - use ark_poly::{DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension}; - #[cfg(not(feature = "std"))] - use ark_std::vec::Vec; + use ark_poly::{ + DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension, + }; + // #[cfg(not(feature = "std"))] + // use ark_std::vec::Vec; use ark_std::{rand::RngCore, test_rng, UniformRand}; type E = Bls12_381; type Fr = ::ScalarField; diff --git a/poly-commit/src/sonic_pc/data_structures.rs b/poly-commit/src/sonic_pc/data_structures.rs index 4ed8e500..2fb95f85 100644 --- a/poly-commit/src/sonic_pc/data_structures.rs +++ b/poly-commit/src/sonic_pc/data_structures.rs @@ -6,8 +6,8 @@ use ark_serialize::{ CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; use ark_std::io::{Read, Write}; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; /// `UniversalParams` are the universal parameters for the KZG10 scheme. pub type UniversalParams = kzg10::UniversalParams; diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs index 8af2496e..170a7d53 100644 --- a/poly-commit/src/sonic_pc/mod.rs +++ b/poly-commit/src/sonic_pc/mod.rs @@ -7,11 +7,11 @@ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::{One, UniformRand, Zero}; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, rand::RngCore}; -#[cfg(not(feature = "std"))] -use ark_std::{ - string::{String, ToString}, - vec::Vec, -}; +// #[cfg(not(feature = "std"))] +// use ark_std::{ +// string::{String, ToString}, +// vec::Vec, +// }; mod data_structures; pub use data_structures::*; diff --git a/poly-commit/src/streaming_kzg/mod.rs b/poly-commit/src/streaming_kzg/mod.rs index cec7f951..ef074adf 100644 --- a/poly-commit/src/streaming_kzg/mod.rs +++ b/poly-commit/src/streaming_kzg/mod.rs @@ -86,8 +86,8 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, Zero}; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; use ark_serialize::{CanonicalSerialize, Compress}; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{ borrow::Borrow, fmt, diff --git a/poly-commit/src/streaming_kzg/space.rs b/poly-commit/src/streaming_kzg/space.rs index db8b9d45..96c7a4cf 100644 --- a/poly-commit/src/streaming_kzg/space.rs +++ b/poly-commit/src/streaming_kzg/space.rs @@ -13,8 +13,8 @@ use ark_ec::{ }; use ark_ff::{PrimeField, Zero}; use ark_poly::Polynomial; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{ borrow::Borrow, collections::VecDeque, diff --git a/poly-commit/src/streaming_kzg/tests.rs b/poly-commit/src/streaming_kzg/tests.rs index c92dcb09..7aeea7f6 100644 --- a/poly-commit/src/streaming_kzg/tests.rs +++ b/poly-commit/src/streaming_kzg/tests.rs @@ -4,8 +4,8 @@ use crate::streaming_kzg::{ use ark_bls12_381::{Bls12_381, Fr}; use 
ark_ff::Field; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{ borrow::Borrow, iterable::{Iterable, Reverse}, diff --git a/poly-commit/src/streaming_kzg/time.rs b/poly-commit/src/streaming_kzg/time.rs index 0d73c3c1..3fb4d4cf 100644 --- a/poly-commit/src/streaming_kzg/time.rs +++ b/poly-commit/src/streaming_kzg/time.rs @@ -6,8 +6,8 @@ use crate::streaming_kzg::{ use ark_ec::{pairing::Pairing, scalar_mul::ScalarMul, CurveGroup}; use ark_ff::Zero; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; -#[cfg(not(feature = "std"))] -use ark_std::vec::Vec; +// #[cfg(not(feature = "std"))] +// use ark_std::vec::Vec; use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, UniformRand}; /// The SRS for the polynomial commitment scheme for a max From cb2074058987a11e4266338352c2dea2a53a535e Mon Sep 17 00:00:00 2001 From: Hossein Moghaddas Date: Fri, 25 Oct 2024 12:34:33 +0200 Subject: [PATCH 71/75] Revert and cleanup --- poly-commit/src/constraints.rs | 4 ++-- poly-commit/src/data_structures.rs | 4 ++-- poly-commit/src/error.rs | 4 ++-- poly-commit/src/hyrax/data_structures.rs | 9 ++++----- poly-commit/src/hyrax/mod.rs | 7 +++---- poly-commit/src/hyrax/tests.rs | 7 +++---- poly-commit/src/hyrax/utils.rs | 1 + poly-commit/src/ipa_pc/mod.rs | 10 +++++----- poly-commit/src/kzg10/mod.rs | 4 ++-- poly-commit/src/lib.rs | 10 +++++----- poly-commit/src/marlin/marlin_pc/data_structures.rs | 4 ++-- poly-commit/src/marlin/marlin_pc/mod.rs | 4 ++-- poly-commit/src/marlin/marlin_pst13_pc/combinations.rs | 4 ++-- .../src/marlin/marlin_pst13_pc/data_structures.rs | 4 ++-- poly-commit/src/marlin/marlin_pst13_pc/mod.rs | 8 ++++---- poly-commit/src/marlin/mod.rs | 10 +++++----- poly-commit/src/multilinear_pc/data_structures.rs | 4 ++-- poly-commit/src/multilinear_pc/mod.rs | 8 ++++---- poly-commit/src/sonic_pc/data_structures.rs | 4 ++-- poly-commit/src/sonic_pc/mod.rs | 10 +++++----- poly-commit/src/streaming_kzg/mod.rs | 4 ++-- poly-commit/src/streaming_kzg/space.rs | 4 ++-- poly-commit/src/streaming_kzg/tests.rs | 4 ++-- poly-commit/src/streaming_kzg/time.rs | 4 ++-- 24 files changed, 67 insertions(+), 69 deletions(-) diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index 04401e40..ae1a6c66 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -14,8 +14,8 @@ use ark_std::{ cmp::{Eq, PartialEq}, hash::{BuildHasherDefault, Hash}, }; -// #[cfg(not(feature = "std"))] -// use ark_std::{string::String, vec::Vec}; +#[cfg(not(feature = "std"))] +use ark_std::{string::String, vec::Vec}; use hashbrown::{HashMap, HashSet}; #[cfg(all( diff --git a/poly-commit/src/data_structures.rs b/poly-commit/src/data_structures.rs index 608b2d8e..cd822b98 100644 --- a/poly-commit/src/data_structures.rs +++ b/poly-commit/src/data_structures.rs @@ -7,8 +7,8 @@ use ark_std::{ ops::{AddAssign, MulAssign, SubAssign}, rand::RngCore, }; -// #[cfg(not(feature = "std"))] -// use ark_std::{string::String, vec::Vec}; +#[cfg(not(feature = "std"))] +use ark_std::{string::String, vec::Vec}; /// Labels a `LabeledPolynomial` or a `LabeledCommitment`. 
pub type PolynomialLabel = String; diff --git a/poly-commit/src/error.rs b/poly-commit/src/error.rs index 06582e32..15eee6a1 100644 --- a/poly-commit/src/error.rs +++ b/poly-commit/src/error.rs @@ -1,5 +1,5 @@ -// #[cfg(not(feature = "std"))] -// use ark_std::string::String; +#[cfg(not(feature = "std"))] +use ark_std::string::String; /// The error type for `PolynomialCommitment`. #[derive(Debug)] diff --git a/poly-commit/src/hyrax/data_structures.rs b/poly-commit/src/hyrax/data_structures.rs index 80d99ff5..31afdcb0 100644 --- a/poly-commit/src/hyrax/data_structures.rs +++ b/poly-commit/src/hyrax/data_structures.rs @@ -1,12 +1,11 @@ -use ark_ec::AffineRepr; -use ark_ff::PrimeField; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::{rand::RngCore, vec::Vec}; - use crate::{ utils::Matrix, PCCommitment, PCCommitmentState, PCCommitterKey, PCUniversalParams, PCVerifierKey, }; +use ark_ec::AffineRepr; +use ark_ff::PrimeField; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::{rand::RngCore, vec::Vec}; /// `UniversalParams` amounts to a Pederson commitment key of sufficient length #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] diff --git a/poly-commit/src/hyrax/mod.rs b/poly-commit/src/hyrax/mod.rs index 8484d881..14930023 100644 --- a/poly-commit/src/hyrax/mod.rs +++ b/poly-commit/src/hyrax/mod.rs @@ -1,8 +1,7 @@ -use crate::hyrax::utils::tensor_prime; -use crate::utils::{inner_product, scalar_by_vector, vector_sum, Matrix}; use crate::{ - hyrax::utils::flat_to_matrix_column_major, Error, LabeledCommitment, LabeledPolynomial, - PolynomialCommitment, + hyrax::utils::{flat_to_matrix_column_major, tensor_prime}, + utils::{inner_product, scalar_by_vector, vector_sum, Matrix}, + Error, LabeledCommitment, LabeledPolynomial, PolynomialCommitment, }; use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; diff --git a/poly-commit/src/hyrax/tests.rs b/poly-commit/src/hyrax/tests.rs index 42fba46c..efb54600 100644 --- a/poly-commit/src/hyrax/tests.rs +++ b/poly-commit/src/hyrax/tests.rs @@ -1,7 +1,6 @@ -use crate::hyrax::HyraxPC; -use crate::tests::*; -use crate::utils::test_sponge; -use crate::{LabeledPolynomial, PolynomialCommitment}; +use crate::{ + hyrax::HyraxPC, tests::*, utils::test_sponge, LabeledPolynomial, PolynomialCommitment, +}; use ark_bls12_377::G1Affine; use ark_ec::AffineRepr; use ark_ed_on_bls12_381::EdwardsAffine; diff --git a/poly-commit/src/hyrax/utils.rs b/poly-commit/src/hyrax/utils.rs index 74879a9e..fa520796 100644 --- a/poly-commit/src/hyrax/utils.rs +++ b/poly-commit/src/hyrax/utils.rs @@ -1,4 +1,5 @@ use ark_ff::Field; +#[cfg(not(feature = "std"))] use ark_std::vec::Vec; #[cfg(feature = "parallel")] diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index b280b66a..8afbacd9 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -8,11 +8,11 @@ use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; use ark_serialize::CanonicalSerialize; use ark_std::{convert::TryInto, format, marker::PhantomData, ops::Mul, rand::RngCore}; -// #[cfg(not(feature = "std"))] -// use ark_std::{ -// string::{String, ToString}, -// vec::Vec, -// }; +#[cfg(not(feature = "std"))] +use ark_std::{ + string::{String, ToString}, + vec::Vec, +}; use digest::Digest; #[cfg(feature = "parallel")] use rayon::prelude::*; diff --git a/poly-commit/src/kzg10/mod.rs 
b/poly-commit/src/kzg10/mod.rs index da9ff524..81a7b59f 100644 --- a/poly-commit/src/kzg10/mod.rs +++ b/poly-commit/src/kzg10/mod.rs @@ -10,8 +10,8 @@ use ark_ec::{pairing::Pairing, scalar_mul::ScalarMul, AffineRepr, CurveGroup, Va use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, rand::RngCore}; -// #[cfg(not(feature = "std"))] -// use ark_std::{string::ToString, vec::Vec}; +#[cfg(not(feature = "std"))] +use ark_std::{string::ToString, vec::Vec}; #[cfg(feature = "parallel")] use rayon::prelude::*; diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index 65d39f42..5479051d 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -26,11 +26,11 @@ use ark_std::{ iter::FromIterator, rand::RngCore, }; -// #[cfg(not(feature = "std"))] -// use ark_std::{ -// string::{String, ToString}, -// vec::Vec, -// }; +#[cfg(not(feature = "std"))] +use ark_std::{ + string::{String, ToString}, + vec::Vec, +}; /// Data structures used by a polynomial commitment scheme. pub mod data_structures; diff --git a/poly-commit/src/marlin/marlin_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pc/data_structures.rs index 4d866426..f01f67ab 100644 --- a/poly-commit/src/marlin/marlin_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pc/data_structures.rs @@ -5,8 +5,8 @@ use crate::{ use ark_ec::{pairing::Pairing, AdditiveGroup}; use ark_ff::{Field, PrimeField, ToConstraintField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -// #[cfg(not(feature = "std"))] -// use ark_std::vec::Vec; +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; use ark_std::{ ops::{Add, AddAssign}, rand::RngCore, diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 00ee9cf1..52f56354 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -8,8 +8,8 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::Zero; use ark_poly::DenseUVPolynomial; use ark_std::{marker::PhantomData, ops::Div, rand::RngCore}; -// #[cfg(not(feature = "std"))] -// use ark_std::{string::ToString, vec::Vec}; +#[cfg(not(feature = "std"))] +use ark_std::{string::ToString, vec::Vec}; mod data_structures; pub use data_structures::*; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs b/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs index b44e1ebe..bc76ea99 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/combinations.rs @@ -1,7 +1,7 @@ //! Compute all combinations of values in a given list //! Credit: https://github.com/meltinglava/uniquecombinations/ -// #[cfg(not(feature = "std"))] -// use ark_std::vec::Vec; +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; /// Compute all combinations of values in a given list. 
pub(crate) struct Combinations where diff --git a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs index b836d9b1..22377b9d 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs @@ -7,8 +7,8 @@ use ark_poly::DenseMVPolynomial; use ark_serialize::{ CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; -// #[cfg(not(feature = "std"))] -// use ark_std::vec::Vec; +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; use ark_std::{ io::{Read, Write}, marker::PhantomData, diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index 2e6b8725..b0f7114a 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -13,8 +13,8 @@ use ark_ec::{ use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::{marker::PhantomData, ops::Index, ops::Mul, rand::RngCore}; -// #[cfg(not(feature = "std"))] -// use ark_std::{string::ToString, vec::Vec}; +#[cfg(not(feature = "std"))] +use ark_std::{string::ToString, vec::Vec}; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -702,8 +702,8 @@ mod tests { multivariate::{SparsePolynomial as SparsePoly, SparseTerm}, DenseMVPolynomial, }; - // #[cfg(not(feature = "std"))] - // use ark_std::vec::Vec; + #[cfg(not(feature = "std"))] + use ark_std::vec::Vec; use rand_chacha::ChaCha20Rng; type MVPoly_381 = SparsePoly<::ScalarField, SparseTerm>; diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index 94dca7d1..714ad727 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ -7,11 +7,11 @@ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::{One, Zero}; use ark_std::{convert::TryInto, hash::Hash, ops::AddAssign, ops::Mul}; -// #[cfg(not(feature = "std"))] -// use ark_std::{ -// string::{String, ToString}, -// vec::Vec, -// }; +#[cfg(not(feature = "std"))] +use ark_std::{ + string::{String, ToString}, + vec::Vec, +}; /// Polynomial commitment scheme from [[KZG10]][kzg] that enforces /// strict degree bounds and (optionally) enables hiding commitments by diff --git a/poly-commit/src/multilinear_pc/data_structures.rs b/poly-commit/src/multilinear_pc/data_structures.rs index f5909a69..c70aae08 100644 --- a/poly-commit/src/multilinear_pc/data_structures.rs +++ b/poly-commit/src/multilinear_pc/data_structures.rs @@ -1,7 +1,7 @@ use ark_ec::pairing::Pairing; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -// #[cfg(not(feature = "std"))] -// use ark_std::vec::Vec; +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; #[allow(type_alias_bounds)] /// Evaluations over {0,1}^n for G1 pub type EvaluationHyperCubeOnG1 = Vec; diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index 97d1de5e..8240f526 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -8,8 +8,8 @@ use ark_ec::{ }; use ark_ff::{Field, One, PrimeField, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; -// #[cfg(not(feature = "std"))] -// use ark_std::vec::Vec; +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; use ark_std::{ collections::LinkedList, iter::FromIterator, marker::PhantomData, ops::Mul, rand::RngCore, UniformRand, @@ 
@@ -241,8 +241,8 @@ mod tests {
     use ark_poly::{
         DenseMultilinearExtension, MultilinearExtension, Polynomial, SparseMultilinearExtension,
     };
-    // #[cfg(not(feature = "std"))]
-    // use ark_std::vec::Vec;
+    #[cfg(not(feature = "std"))]
+    use ark_std::vec::Vec;
     use ark_std::{rand::RngCore, test_rng, UniformRand};
     type E = Bls12_381;
     type Fr = <E as Pairing>::ScalarField;
diff --git a/poly-commit/src/sonic_pc/data_structures.rs b/poly-commit/src/sonic_pc/data_structures.rs
index 2fb95f85..4ed8e500 100644
--- a/poly-commit/src/sonic_pc/data_structures.rs
+++ b/poly-commit/src/sonic_pc/data_structures.rs
@@ -6,8 +6,8 @@ use ark_serialize::{
     CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate,
 };
 use ark_std::io::{Read, Write};
-// #[cfg(not(feature = "std"))]
-// use ark_std::vec::Vec;
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
 
 /// `UniversalParams` are the universal parameters for the KZG10 scheme.
 pub type UniversalParams<E> = kzg10::UniversalParams<E>;
diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs
index 170a7d53..8af2496e 100644
--- a/poly-commit/src/sonic_pc/mod.rs
+++ b/poly-commit/src/sonic_pc/mod.rs
@@ -7,11 +7,11 @@ use ark_crypto_primitives::sponge::CryptographicSponge;
 use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup};
 use ark_ff::{One, UniformRand, Zero};
 use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, rand::RngCore};
-// #[cfg(not(feature = "std"))]
-// use ark_std::{
-//     string::{String, ToString},
-//     vec::Vec,
-// };
+#[cfg(not(feature = "std"))]
+use ark_std::{
+    string::{String, ToString},
+    vec::Vec,
+};
 
 mod data_structures;
 pub use data_structures::*;
diff --git a/poly-commit/src/streaming_kzg/mod.rs b/poly-commit/src/streaming_kzg/mod.rs
index ef074adf..cec7f951 100644
--- a/poly-commit/src/streaming_kzg/mod.rs
+++ b/poly-commit/src/streaming_kzg/mod.rs
@@ -86,8 +86,8 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM};
 use ark_ff::{Field, One, PrimeField, Zero};
 use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
 use ark_serialize::{CanonicalSerialize, Compress};
-// #[cfg(not(feature = "std"))]
-// use ark_std::vec::Vec;
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
 use ark_std::{
     borrow::Borrow,
     fmt,
diff --git a/poly-commit/src/streaming_kzg/space.rs b/poly-commit/src/streaming_kzg/space.rs
index 96c7a4cf..db8b9d45 100644
--- a/poly-commit/src/streaming_kzg/space.rs
+++ b/poly-commit/src/streaming_kzg/space.rs
@@ -13,8 +13,8 @@ use ark_ec::{
 };
 use ark_ff::{PrimeField, Zero};
 use ark_poly::Polynomial;
-// #[cfg(not(feature = "std"))]
-// use ark_std::vec::Vec;
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
 use ark_std::{
     borrow::Borrow,
     collections::VecDeque,
diff --git a/poly-commit/src/streaming_kzg/tests.rs b/poly-commit/src/streaming_kzg/tests.rs
index 7aeea7f6..c92dcb09 100644
--- a/poly-commit/src/streaming_kzg/tests.rs
+++ b/poly-commit/src/streaming_kzg/tests.rs
@@ -4,8 +4,8 @@ use crate::streaming_kzg::{
 use ark_bls12_381::{Bls12_381, Fr};
 use ark_ff::Field;
 use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
-// #[cfg(not(feature = "std"))]
-// use ark_std::vec::Vec;
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
 use ark_std::{
     borrow::Borrow,
     iterable::{Iterable, Reverse},
diff --git a/poly-commit/src/streaming_kzg/time.rs b/poly-commit/src/streaming_kzg/time.rs
index 3fb4d4cf..0d73c3c1 100644
--- a/poly-commit/src/streaming_kzg/time.rs
+++ b/poly-commit/src/streaming_kzg/time.rs
@@ -6,8 +6,8 @@ use crate::streaming_kzg::{
 use ark_ec::{pairing::Pairing, scalar_mul::ScalarMul, CurveGroup};
 use ark_ff::Zero;
 use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
-// #[cfg(not(feature = "std"))]
-// use ark_std::vec::Vec;
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
 use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, UniformRand};
 
 /// The SRS for the polynomial commitment scheme for a max

From 52391624d04efa22409e752804de62d9792458e1 Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Fri, 25 Oct 2024 13:03:22 +0200
Subject: [PATCH 72/75] Delete dummy doc

---
 poly-commit/src/streaming_kzg/mod.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/poly-commit/src/streaming_kzg/mod.rs b/poly-commit/src/streaming_kzg/mod.rs
index cec7f951..402a93db 100644
--- a/poly-commit/src/streaming_kzg/mod.rs
+++ b/poly-commit/src/streaming_kzg/mod.rs
@@ -101,8 +101,8 @@ pub use data_structures::*;
 pub use space::CommitterKeyStream;
 pub use time::CommitterKey;
 
-/// Dummy docs
 #[cfg(test)]
+#[allow(missing_docs)]
 pub mod tests;
 
 /// A Kate polynomial commitment over a bilinear group, represented as a single \\(\GG_1\\) element.
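
[A short sketch of the lint interaction behind PATCH 72, assuming the crate root denies `missing_docs` (which is what made the placeholder doc necessary in the first place); the module body below is hypothetical. Under `cargo test`, the public test module is compiled and must either carry real documentation or opt out explicitly, so `#[allow(missing_docs)]` is the idiomatic replacement for a dummy doc comment.]

    #![deny(missing_docs)]
    //! Example crate-level documentation.

    // Under `cargo test` this public module falls under the missing_docs
    // lint; the allow attribute opts it out without a meaningless doc line.
    #[cfg(test)]
    #[allow(missing_docs)]
    pub mod tests {
        #[test]
        fn smoke() {
            assert_eq!(1 + 1, 2);
        }
    }
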
From 921d8ca384b101501c26f4e51fc05d9a23503239 Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Fri, 25 Oct 2024 14:32:22 +0200
Subject: [PATCH 73/75] Bring back `num_traits`

---
 poly-commit/src/linear_codes/data_structures.rs        | 1 +
 poly-commit/src/linear_codes/ligero.rs                 | 2 ++
 poly-commit/src/linear_codes/multilinear_ligero/mod.rs | 2 +-
 poly-commit/src/linear_codes/utils.rs                  | 2 ++
 4 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/poly-commit/src/linear_codes/data_structures.rs b/poly-commit/src/linear_codes/data_structures.rs
index eff304ed..60960ae8 100644
--- a/poly-commit/src/linear_codes/data_structures.rs
+++ b/poly-commit/src/linear_codes/data_structures.rs
@@ -5,6 +5,7 @@ use ark_crypto_primitives::{
 };
 use ark_ff::PrimeField;
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+#[cfg(not(feature = "std"))]
 use ark_std::vec::Vec;
 use ark_std::{marker::PhantomData, rand::RngCore};
 
diff --git a/poly-commit/src/linear_codes/ligero.rs b/poly-commit/src/linear_codes/ligero.rs
index de1c4d03..41dddf15 100644
--- a/poly-commit/src/linear_codes/ligero.rs
+++ b/poly-commit/src/linear_codes/ligero.rs
@@ -9,6 +9,8 @@ use ark_crypto_primitives::{
 };
 use ark_ff::PrimeField;
 use ark_std::{log2, marker::PhantomData};
+#[cfg(not(feature = "std"))]
+use num_traits::Float;
 
 impl<F, C, H> LigeroPCParams<F, C, H>
 where
diff --git a/poly-commit/src/linear_codes/multilinear_ligero/mod.rs b/poly-commit/src/linear_codes/multilinear_ligero/mod.rs
index 09c0fb4d..4d8c8b86 100644
--- a/poly-commit/src/linear_codes/multilinear_ligero/mod.rs
+++ b/poly-commit/src/linear_codes/multilinear_ligero/mod.rs
@@ -8,9 +8,9 @@ use ark_crypto_primitives::{
 };
 use ark_ff::{FftField, PrimeField};
 use ark_poly::{MultilinearExtension, Polynomial};
-use ark_std::{log2, marker::PhantomData};
 #[cfg(not(feature = "std"))]
 use ark_std::vec::Vec;
+use ark_std::{log2, marker::PhantomData};
 
 mod tests;
 
diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs
index 15be0fc3..dc4fe91a 100644
--- a/poly-commit/src/linear_codes/utils.rs
+++ b/poly-commit/src/linear_codes/utils.rs
@@ -4,6 +4,8 @@ use ark_ff::{FftField, PrimeField};
 use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
 #[cfg(not(feature = "std"))]
 use ark_std::{string::ToString, vec::Vec};
+#[cfg(not(feature = "std"))]
+use num_traits::Float;
 
 /// Apply reed-solomon encoding to msg.
 /// Assumes msg.len() is equal to the order of some FFT domain in F.
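
[PATCH 73 reintroduces `num_traits::Float` because inherent `f64` methods such as `log2` and `ceil` exist only when `std` is linked; in a no_std build the `Float` trait supplies them, backed by `libm` when that feature of `num-traits` is enabled. A minimal sketch of the pattern follows; the helper below is hypothetical, not code from the patch.]

    // In no_std builds, the trait import provides `log2`/`ceil` for `f64`;
    // under std, the inherent methods are used and the import is cfg'd out.
    #[cfg(not(feature = "std"))]
    use num_traits::Float;

    /// ceil(log2(n)): the number of bits needed to index `n` leaves.
    fn index_bits(n: usize) -> u32 {
        (n as f64).log2().ceil() as u32
    }
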
From 9a2fdc148232687f38f9d8eb2994ba0a8904c823 Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Fri, 25 Oct 2024 18:18:56 +0200
Subject: [PATCH 74/75] Fix merge conflict for README.md

Co-authored-by: Pratyush Mishra
---
 README.md | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/README.md b/README.md
index d54fe9fe..e851ee1e 100644
--- a/README.md
+++ b/README.md
@@ -183,12 +183,9 @@ Unless you explicitly state otherwise, any contribution that you submit to this
 [aurora-light]: https://ia.cr/2019/601
 [pcd-acc]: https://ia.cr/2020/499
 [pst]: https://ia.cr/2011/587
-<<<<<<< HEAD
 [brakedown]: https://ia.cr/2021/1043
-=======
 [ligero]: https://ia.cr/2022/1608
 [hyrax]: https://eprint.iacr.org/2017/1132
->>>>>>> ligero-uni-and-ml
 
 ## Reference papers
 

From 2f853cfbf55f9b681307b1a9ed8ad010a87757fd Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Fri, 25 Oct 2024 18:53:29 +0200
Subject: [PATCH 75/75] Add `/` to Cargo.toml

---
 Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Cargo.toml b/Cargo.toml
index bd2cf086..51ea045a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -34,7 +34,7 @@ ark-ec = { git = "https://github.com/arkworks-rs/algebra/" }
 ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" }
 ark-poly = { git = "https://github.com/arkworks-rs/algebra/" }
 
-ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" }
+ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives/" }
 
 ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" }
 ark-bls12-377 = { git = "https://github.com/arkworks-rs/algebra/" }