comments and cleanup

ledwards2225 committed Aug 16, 2023
1 parent 8e07148 commit 779f3c2
Showing 17 changed files with 81 additions and 85 deletions.
@@ -18,6 +18,9 @@ class BN254 {
using G2BaseField = typename barretenberg::fq2;
using TargetField = barretenberg::fq12;

// TODO(#673): This flag is temporary. It is needed in the verifier classes (GeminiVerifier, etc.) while these
// classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only
// with stdlib types, and "native" verification will be achieved via a simulated builder.
static constexpr bool is_stdlib_type = false;
};
} // namespace curve
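As a rough illustration of the pattern this flag enables (a sketch with assumed names, not code from this commit), downstream verifier code branches on it at compile time, pulling a circuit builder from the transcript and building witnesses on the stdlib path while using plain native values otherwise:

// Sketch only: how verifier code typically consumes Curve::is_stdlib_type.
template <typename Curve> typename Curve::ScalarField make_zero_evaluation(auto& transcript)
{
    using Fr = typename Curve::ScalarField;
    if constexpr (Curve::is_stdlib_type) {
        auto ctx = transcript.builder;    // a circuit builder is available in the stdlib setting
        return Fr::from_witness(ctx, 0);  // zero as an in-circuit witness
    } else {
        return Fr(0);                     // zero as a plain native field element
    }
}

The same idea appears below in the Shplonk verifier, where the flag chooses between a deferred batch_mul and direct native accumulation.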
@@ -43,6 +43,9 @@ class Grumpkin {
using Element = typename Group::element;
using AffineElement = typename Group::affine_element;

// TODO(#673): This flag is temporary. It is needed in the verifier classes (GeminiVerifier, etc.) while these
// classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only
// with stdlib types, and "native" verification will be achieved via a simulated builder.
static constexpr bool is_stdlib_type = false;
};
} // namespace curve
@@ -32,16 +32,14 @@ class UltraRecursive {
public:
using CircuitBuilder = UltraCircuitBuilder;
using Curve = plonk::stdlib::bn254<CircuitBuilder>;
using FF = Curve::ScalarField;
using PCS = pcs::kzg::KZG<Curve>;
using GroupElement = Curve::Element;
using Commitment = Curve::Element;
using CommitmentHandle = Curve::Element;
using FF = Curve::ScalarField;

// WORKTODO: these. do we need them?
using CommitmentKey = pcs::CommitmentKey<Curve>;
// Note(luke): Eventually this may not be needed at all
using VerifierCommitmentKey = pcs::VerifierCommitmentKey<Curve>;

using PCS = pcs::kzg::KZG<Curve>;

static constexpr size_t NUM_WIRES = CircuitBuilder::NUM_WIRES;
// The number of multivariate polynomials on which the sumcheck prover operates (including shifts). We often
@@ -255,11 +253,17 @@ class UltraRecursive {
* that, and split out separate PrecomputedPolynomials/Commitments data for clarity but also for portability of our
* circuits.
*/
class VerificationKey : public VerificationKey_<PrecomputedEntities<Commitment, CommitmentHandle>>
{
class VerificationKey : public VerificationKey_<PrecomputedEntities<Commitment, CommitmentHandle>> {
public:
/**
* @brief Construct a new Verification Key with stdlib types from a provided native verification key
*
* @param builder
* @param native_key Native verification key from which to extract the precomputed commitments
*/
VerificationKey(CircuitBuilder* builder, auto native_key)
: VerificationKey_<PrecomputedEntities<Commitment, CommitmentHandle>>(native_key->circuit_size, native_key->num_public_inputs)
: VerificationKey_<PrecomputedEntities<Commitment, CommitmentHandle>>(native_key->circuit_size,
native_key->num_public_inputs)
{
q_m = Commitment::from_witness(builder, native_key->q_m);
q_l = Commitment::from_witness(builder, native_key->q_l);
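A brief usage sketch of the new constructor (the surrounding names are assumptions, not code from this commit): the recursive verifier lifts a native verification key into stdlib types so each precomputed commitment becomes a witness of the outer circuit.

// Usage sketch with assumed names, not library code.
template <typename Flavor>
typename Flavor::VerificationKey to_stdlib_vk(typename Flavor::CircuitBuilder& outer_builder,
                                              const auto& native_vk)
{
    using VK = typename Flavor::VerificationKey;
    // The constructor above copies circuit_size / num_public_inputs and converts each
    // precomputed commitment via Commitment::from_witness(&outer_builder, ...).
    return VK(&outer_builder, native_vk);
}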
@@ -237,10 +237,10 @@ template <typename Curve> class GeminiVerifier_ {
GroupElement C0_r_neg = batched_f;
Fr r_inv = r.invert();

// WORKTODO: reinstate some kind of !batched_g.is_point_at_infinity() check for stdlib? This is mostly relevant
// for Gemini unit tests since in practice batched_g will not be zero
// TODO(luke): reinstate some kind of !batched_g.is_point_at_infinity() check for stdlib types? This is mostly
// relevant for Gemini unit tests since in practice batched_g != zero (i.e. we will always have shifted polys).
bool batched_g_is_point_at_infinity = false;
if constexpr (!Curve::is_stdlib_type) {
if constexpr (!Curve::is_stdlib_type) { // Note: required for Gemini tests with no shifts
batched_g_is_point_at_infinity = batched_g.is_point_at_infinity();
}
if (!batched_g_is_point_at_infinity) {
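For context on the guard above (a paraphrase of the Gemini step, hedged rather than definitive): batched_f is the commitment $[F]$ to the ρ-batched unshifted polynomials and batched_g the commitment $[G]$ to the batched to-be-shifted polynomials, and the verifier forms, roughly,
$$[A_{0,+}] = [F] + r^{-1}[G], \qquad [A_{0,-}] = [F] - r^{-1}[G].$$
When no polynomials are shifted, $[G]$ is the point at infinity and the two additions can be skipped, which is what the native is_point_at_infinity() check detects; the stdlib path currently has no analogous check, hence the reinstatement TODO.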
@@ -1,8 +1,8 @@
#pragma once
#include "barretenberg/honk/pcs/claim.hpp"
#include "barretenberg/honk/transcript/transcript.hpp"
#include "barretenberg/honk/pcs/commitment_key.hpp"
#include "barretenberg/honk/pcs/verification_key.hpp"
#include "barretenberg/honk/transcript/transcript.hpp"

/**
* @brief Reduces multiple claims about commitments, each opened at a single point
@@ -162,21 +162,14 @@ template <typename Curve> class ShplonkVerifier_ {
* @return OpeningClaim
*/
static OpeningClaim<Curve> reduce_verification(std::shared_ptr<VK> vk,
std::span<const OpeningClaim<Curve>> claims,
auto& transcript)
std::span<const OpeningClaim<Curve>> claims,
auto& transcript)
{

const size_t num_claims = claims.size();

const Fr nu = transcript.get_challenge("Shplonk:nu");

size_t prev_num_gates = 0;
(void)prev_num_gates;
if constexpr (Curve::is_stdlib_type) {
// info("Shplonk, init: num gates = ", nu.get_context().get_num_gates());
prev_num_gates = nu.get_context()->get_num_gates();
}

auto Q_commitment = transcript.template receive_from_prover<Commitment>("Shplonk:Q");

const Fr z_challenge = transcript.get_challenge("Shplonk:z");
@@ -211,7 +204,8 @@ template <typename Curve> class ShplonkVerifier_ {
}

auto current_nu = Fr(1);
std::vector<Commitment> commitments; // used in recursion setting only
// Note: commitments and scalars vectors used only in recursion setting for batch mul
std::vector<Commitment> commitments;
std::vector<Fr> scalars;
for (size_t j = 0; j < num_claims; ++j) {
// (Cⱼ, xⱼ, vⱼ)
@@ -222,7 +216,7 @@ template <typename Curve> class ShplonkVerifier_ {
// G₀ += ρʲ / ( r − xⱼ ) ⋅ vⱼ
G_commitment_constant += scaling_factor * opening_pair.evaluation;

// If recursion, store MSM inputs for batch mul, otherwise perform mul and add directly
// If recursion, store MSM inputs for batch mul, otherwise accumulate directly
if constexpr (Curve::is_stdlib_type) {
commitments.emplace_back(commitment);
scalars.emplace_back(scaling_factor);
@@ -236,33 +230,26 @@ template <typename Curve> class ShplonkVerifier_ {

// If recursion, do batch mul to compute [G] -= ∑ⱼ ρʲ / ( r − xⱼ )⋅[fⱼ]
if constexpr (Curve::is_stdlib_type) {
info("Shplonk: inversions, adds: num gates = ", nu.get_context()->get_num_gates() - prev_num_gates);
prev_num_gates = nu.get_context()->get_num_gates();
G_commitment -= GroupElement::batch_mul(commitments, scalars);
info("Shplonk: batch mul: num gates = ", nu.get_context()->get_num_gates() - prev_num_gates);
prev_num_gates = nu.get_context()->get_num_gates();
}

// [G] += G₀⋅[1] = [G] + (∑ⱼ ρʲ ⋅ vⱼ / ( r − xⱼ ))⋅[1]
Fr evaluation_zero; // 0 \in Fr
GroupElement group_one; // [1]
if constexpr (Curve::is_stdlib_type) {
auto ctx = nu.get_context();
G_commitment += GroupElement::one(ctx) * G_commitment_constant;
info("Shplonk: final mul add: num gates = ", nu.get_context()->get_num_gates() - prev_num_gates);
auto ctx = transcript.builder;
evaluation_zero = Fr::from_witness(ctx, 0);
group_one = GroupElement::one(ctx);
} else {
// GroupElement sort_of_one{ x, y };
G_commitment += vk->srs->get_first_g1() * G_commitment_constant;
evaluation_zero = Fr(0);
group_one = vk->srs->get_first_g1();
}

Fr zero_evaluation;
if constexpr (Curve::is_stdlib_type) {
auto ctx = transcript.builder;
zero_evaluation = Fr::from_witness(ctx, 0);
} else {
zero_evaluation = Fr(0);
}
G_commitment += group_one * G_commitment_constant;

// Return opening pair (z, 0) and commitment [G]
return { { z_challenge, zero_evaluation }, G_commitment };
return { { z_challenge, evaluation_zero }, G_commitment };
};
};
} // namespace proof_system::honk::pcs::shplonk
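The recursion-vs-native accumulation used throughout reduce_verification can be distilled into a small self-contained sketch (simplified, assumed names; not the library's code):

#include <cstddef>
#include <vector>

// Sketch: subtract Σⱼ scalarⱼ·pointⱼ from an accumulator, deferring to a single batch_mul
// gadget in the stdlib (recursive) setting and multiplying term by term natively.
template <typename Curve>
typename Curve::Element accumulate_msm(typename Curve::Element accumulator,
                                       const std::vector<typename Curve::Element>& points,
                                       const std::vector<typename Curve::ScalarField>& scalars)
{
    using Element = typename Curve::Element;
    if constexpr (Curve::is_stdlib_type) {
        // One MSM gadget costs far fewer gates than point-by-point scalar muls in-circuit.
        accumulator -= Element::batch_mul(points, scalars);
    } else {
        for (std::size_t j = 0; j < points.size(); ++j) {
            accumulator -= points[j] * scalars[j];
        }
    }
    return accumulator;
}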
@@ -14,12 +12,12 @@ UltraVerifier_<Flavor>::UltraVerifier_(std::shared_ptr<typename Flavor::Verifica
{}

template <typename Flavor>
UltraVerifier_<Flavor>::UltraVerifier_(UltraVerifier_&& other) noexcept
UltraVerifier_<Flavor>::UltraVerifier_(UltraVerifier_&& other)
: key(std::move(other.key))
, pcs_verification_key(std::move(other.pcs_verification_key))
{}

template <typename Flavor> UltraVerifier_<Flavor>& UltraVerifier_<Flavor>::operator=(UltraVerifier_&& other) noexcept
template <typename Flavor> UltraVerifier_<Flavor>& UltraVerifier_<Flavor>::operator=(UltraVerifier_&& other)
{
key = other.key;
pcs_verification_key = (std::move(other.pcs_verification_key));
@@ -119,7 +119,7 @@ template <typename Flavor> bool UltraVerifier_<Flavor>::verify_proof(const plonk
if (!sumcheck_output.has_value()) {
return false;
}

auto [multivariate_challenge, purported_evaluations] = *sumcheck_output;

// Execute Gemini/Shplonk verification:
@@ -145,7 +145,7 @@ template <typename Flavor> bool UltraVerifier_<Flavor>::verify_proof(const plonk
// Construct batched commitment for NON-shifted polynomials
size_t commitment_idx = 0;
for (auto& commitment : commitments.get_unshifted()) {
batched_commitment_unshifted += commitment * rhos[commitment_idx];
batched_commitment_unshifted += commitment * rhos[commitment_idx];
++commitment_idx;
}

@@ -159,10 +159,10 @@ template <typename Flavor> bool UltraVerifier_<Flavor>::verify_proof(const plonk
// - d+1 commitments [Fold_{r}^(0)], [Fold_{-r}^(0)], and [Fold^(l)], l = 1:d-1
// - d+1 evaluations a_0_pos, and a_l, l = 0:d-1
auto gemini_claim = Gemini::reduce_verification(multivariate_challenge,
batched_evaluation,
batched_commitment_unshifted,
batched_commitment_to_be_shifted,
transcript);
batched_evaluation,
batched_commitment_unshifted,
batched_commitment_to_be_shifted,
transcript);

// Produce a Shplonk claim: commitment [Q] - [Q_z], evaluation zero (at random challenge z)
auto shplonk_claim = Shplonk::reduce_verification(pcs_verification_key, gemini_claim, transcript);
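For reference (stated loosely, not verbatim from the code): the batching loop shown above and its collapsed counterpart for the to-be-shifted commitments form random linear combinations with powers of the challenge ρ, roughly
$$[F] = \sum_i \rho^{i}\,[f_i], \qquad [G] = \sum_k \rho^{i_0 + k}\,[g_k], \qquad v = \sum_j \rho^{j}\, v_j,$$
where the ρ powers continue across the unshifted and to-be-shifted sets and the same powers scale the claimed evaluations; these batched objects are exactly what Gemini::reduce_verification consumes.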
@@ -2,7 +2,6 @@
#include "barretenberg/honk/flavor/goblin_ultra.hpp"
#include "barretenberg/honk/flavor/ultra.hpp"
#include "barretenberg/honk/flavor/ultra_grumpkin.hpp"
#include "barretenberg/honk/flavor/ultra_recursive.hpp"
#include "barretenberg/honk/sumcheck/sumcheck.hpp"
#include "barretenberg/plonk/proof_system/types/proof.hpp"

@@ -15,10 +14,10 @@ template <typename Flavor> class UltraVerifier_ {

public:
explicit UltraVerifier_(std::shared_ptr<VerificationKey> verifier_key = nullptr);
UltraVerifier_(UltraVerifier_&& other) noexcept;
UltraVerifier_(UltraVerifier_&& other);
UltraVerifier_(const UltraVerifier_& other) = delete;
UltraVerifier_& operator=(const UltraVerifier_& other) = delete;
UltraVerifier_& operator=(UltraVerifier_&& other) noexcept;
UltraVerifier_& operator=(UltraVerifier_&& other);

bool verify_proof(const plonk::proof& proof);

@@ -3,6 +3,12 @@
#include <algorithm>
#include <array>

// TODO(#674): We need the functionality of BarycentricData for both field and field_t. The former is "literal" i.e. is
// compatible with constexpr operations, and the latter is not. The functions for computing the pre-computable arrays in
// BarycentricData need to be constexpr and it takes some trickery to share these functions with the non-constexpr
// setting. Right now everything is more or less duplicated across BarycentricDataCompileTime and
// BarycentricDataRunTime. There should be a way to share more of the logic.

/* IMPROVEMENT(Cody): This could or should be improved in various ways. In no particular order:
1) Edge cases are not considered. One non-use case situation (I forget which) leads to a segfault.
@@ -229,8 +235,7 @@ template <class Fr, size_t domain_size, size_t num_evals> class BarycentricDataR
return result;
}

static std::array<Fr, domain_size * num_evals> batch_invert(
const std::array<Fr, domain_size * num_evals>& coeffs)
static std::array<Fr, domain_size * num_evals> batch_invert(const std::array<Fr, domain_size * num_evals>& coeffs)
{
constexpr size_t n = domain_size * num_evals;
std::array<Fr, n> temporaries{};
@@ -260,8 +265,8 @@ template <class Fr, size_t domain_size, size_t num_evals> class BarycentricDataR
// for each x_k in the big domain, build set of domain size-many denominator inverses
// 1/(d_i*(x_k - x_j)). will multiply against each of these (rather than to divide by something)
// for each barycentric evaluation
static std::array<Fr, domain_size * num_evals> construct_denominator_inverses(
const auto& big_domain, const auto& lagrange_denominators)
static std::array<Fr, domain_size * num_evals> construct_denominator_inverses(const auto& big_domain,
const auto& lagrange_denominators)
{
std::array<Fr, domain_size * num_evals> result{}; // default init to 0 since below does not init all elements
for (size_t k = domain_size; k < num_evals; ++k) {
@@ -374,37 +379,31 @@ template <class Fr, size_t domain_size, size_t num_evals> class BarycentricDataR
};
};


/**
* @brief Helper to determine whether input is bberg::field type
*
* @tparam T
*
* @tparam T
*/
template <typename T>
struct is_field_type {
template <typename T> struct is_field_type {
static constexpr bool value = false;
};

template <typename Params>
struct is_field_type<barretenberg::field<Params>> {
template <typename Params> struct is_field_type<barretenberg::field<Params>> {
static constexpr bool value = true;
};

template<typename T>
inline constexpr bool is_field_type_v = is_field_type<T>::value;
template <typename T> inline constexpr bool is_field_type_v = is_field_type<T>::value;

/**
* @brief Exposes BarycentricData with compile time arrays if the type is bberg::field and runtime arrays otherwise
* @brief Exposes BarycentricData with compile time arrays if the type is bberg::field and runtime arrays otherwise
* @details This method is also needed for stdlib field, for which the arrays are not compile time computable
* @tparam Fr
* @tparam domain_size
* @tparam num_evals
* @tparam Fr
* @tparam domain_size
* @tparam num_evals
*/
template <class Fr, size_t domain_size, size_t num_evals>
using BarycentricData = std::conditional_t<
is_field_type_v<Fr>,
BarycentricDataCompileTime<Fr, domain_size, num_evals>,
BarycentricDataRunTime<Fr, domain_size, num_evals>
>;
using BarycentricData = std::conditional_t<is_field_type_v<Fr>,
BarycentricDataCompileTime<Fr, domain_size, num_evals>,
BarycentricDataRunTime<Fr, domain_size, num_evals>>;

} // namespace proof_system::honk::sumcheck
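A small usage sketch of the dispatch above (illustrative only, assuming the BarycentricData header is included; the 2/10 sizes mirror the unit test that follows): for a native barretenberg field the alias resolves to the constexpr-capable implementation, while any other field type, e.g. a stdlib field, falls back to the runtime version.

#include <type_traits>

// Illustrative check of which implementation BarycentricData selects for a native field.
using namespace proof_system::honk::sumcheck;
static_assert(std::is_same_v<BarycentricData<barretenberg::fr, 2, 10>,
                             BarycentricDataCompileTime<barretenberg::fr, 2, 10>>);
// A stdlib field type would instead select BarycentricDataRunTime, since
// is_field_type_v is false for anything that is not a barretenberg::field<Params>.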
@@ -23,9 +23,8 @@ TYPED_TEST(BarycentricDataTests, CompileTimeComputation)
BARYCENTIC_DATA_TESTS_TYPE_ALIASES
const size_t domain_size(2);
const size_t num_evals(10);
// if constexpr (std::is_literal_type_v<FF>) {

static_assert(BarycentricData<FF, domain_size, num_evals>::big_domain[5] == 5);
// }
}

TYPED_TEST(BarycentricDataTests, Extend)
@@ -122,7 +122,8 @@ template <typename FF> struct PowUnivariate {
{
FF current_univariate_eval = univariate_eval(challenge);
zeta_pow = zeta_pow_sqr;
zeta_pow_sqr = zeta_pow_sqr.sqr(); // WORKTODO: used to be self_sqr
// TODO(luke): for native FF, this could be self_sqr()
zeta_pow_sqr = zeta_pow_sqr.sqr();

partial_evaluation_constant *= current_univariate_eval;
}
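For orientation (a paraphrase under the usual pow-polynomial definition, not a quote of the code): PowUnivariate tracks $\zeta^{2^{\ell}}$ for the current sumcheck round $\ell$ of
$$\mathrm{pow}_{\zeta}(X_0,\dots,X_{d-1}) = \prod_{\ell}\big((1 - X_\ell) + X_\ell\,\zeta^{2^{\ell}}\big),$$
so binding $X_\ell$ to the challenge $u_\ell$ multiplies the partial-evaluation constant by $(1 - u_\ell) + u_\ell\,\zeta^{2^{\ell}}$ and squares zeta_pow_sqr to obtain $\zeta^{2^{\ell+1}}$ for the next round. The TODO notes that for native fields this squaring could use the in-place self_sqr(), presumably unavailable in the same form for stdlib field types.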
@@ -8,7 +8,6 @@ namespace proof_system::honk::sumcheck {
*
* @tparam FF
*/
// WORKTODO: cant init via FF::zero() for stdlib. Do we need this?
template <typename FF> struct RelationParameters {
FF eta = FF(0); // Lookup
FF beta = FF(0); // Permutation + Lookup
@@ -424,7 +424,7 @@ template <typename Flavor> class SumcheckVerifierRound {
// S^{l}(1) = ( (1−1) + 1⋅ζ^{ 2^l } ) ⋅ T^{l}(1) = ζ^{ 2^l } ⋅ T^{l}(1)
FF total_sum = univariate.value_at(0) + univariate.value_at(1);
// target_total_sum = sigma_{l} =
// WORKTODO: perhaps conditionals like this go away once native verification is is just recursive verification
// TODO(#673): Conditionals like this can go away once native verification is just recursive verification
// with a simulated builder.
bool sumcheck_round_failed(false);
if constexpr (IsRecursiveFlavor<Flavor>) {
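For context on the check being set up above (the standard sumcheck round check, stated loosely): in round $\ell$ the verifier receives the univariate $\tilde S^{\ell}$ and requires
$$\sigma_{\ell} \stackrel{?}{=} \tilde S^{\ell}(0) + \tilde S^{\ell}(1),$$
then sets the next target $\sigma_{\ell+1} = \tilde S^{\ell}(u_\ell)$ at the sampled challenge $u_\ell$. The IsRecursiveFlavor branch exists presumably because, with stdlib types, this comparison is expressed in-circuit rather than as a native bool, which is the situation TODO(#673) expects to disappear once native verification is itself recursive verification with a simulated builder.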
@@ -9,6 +9,9 @@ namespace stdlib {

template <typename CircuitBuilder> struct bn254 {
static constexpr proof_system::CurveType type = proof_system::CurveType::BN254;
// TODO(#673): This flag is temporary. It is needed in the verifier classes (GeminiVerifier, etc.) while these
// classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only
// with stdlib types, and "native" verification will be achieved via a simulated builder.
static constexpr bool is_stdlib_type = true;

// Corresponding native types (used exclusively for testing)
@@ -6,7 +6,7 @@
#include "barretenberg/honk/flavor/ultra_recursive.hpp"
#include "barretenberg/honk/sumcheck/polynomials/univariate.hpp"
#include "barretenberg/honk/transcript/transcript.hpp"
#include "barretenberg/stdlib/recursion/honk/transcript/trancript.hpp"
#include "barretenberg/stdlib/recursion/honk/transcript/transcript.hpp"

namespace proof_system::plonk::stdlib::recursion::honk {

@@ -111,7 +111,7 @@ TEST(RecursiveHonkTranscript, InterfacesMatch)
Transcript<Builder> transcript{ &builder, proof_data };
perform_mock_verifier_transcript_operations<UltraRecursiveFlavor, LENGTH>(transcript);

// Confirm that the native and stdlib transcripts have generated the same manifest
// Confirm that the native and stdlib verifier transcripts have generated the same manifest
EXPECT_EQ(transcript.get_manifest(), native_transcript.get_manifest());

// TODO(luke): This doesn't check much of anything until hashing is constrained in the stdlib transcript