Skip to content

Commit

Permalink
chore: convert commitment to struct so earthly runs
Browse files Browse the repository at this point in the history
  • Loading branch information
MirandaWood committed Nov 19, 2024
1 parent 4a8d811 commit 5d57b6d
Show file tree
Hide file tree
Showing 6 changed files with 74 additions and 34 deletions.
33 changes: 19 additions & 14 deletions noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
// TODO(#9982): Replace unconstrained_config with config and import ROOTS - calculating ROOTS in unconstrained is insecure.
use crate::{
blob_public_inputs::{BlobPublicInputs, BlockBlobPublicInputs},
blob_public_inputs::{BlobCommitment, BlobPublicInputs, BlockBlobPublicInputs},
unconstrained_config::{
compute_roots_of_unity, D, D_INV, F, FIELDS_CARRYING_AN_EXTRA_BIT_PER_BLOB,
LOG_FIELDS_PER_BLOB, NOIR_FIELDS_PER_BLOB,
Expand Down Expand Up @@ -115,8 +115,8 @@ pub fn check_block_blob_sponge(
sponge_hash
}

fn compute_challenge(hashed_blobs_fields: Field, kzg_commitment: [Field; 2]) -> Field {
let preimage = [hashed_blobs_fields, kzg_commitment[0], kzg_commitment[1]];
fn compute_challenge(hashed_blobs_fields: Field, kzg_commitment: BlobCommitment) -> Field {
let preimage = [hashed_blobs_fields, kzg_commitment.inner[0], kzg_commitment.inner[1]];
let challenge = std::hash::poseidon2::Poseidon2::hash(preimage, 3);
challenge
}
Expand All @@ -130,7 +130,7 @@ fn compute_challenge(hashed_blobs_fields: Field, kzg_commitment: [Field; 2]) ->
// See yarn-project/foundation/src/blob/index.ts -> commitmentToFields() for encoding
fn evaluate_blob(
blob_as_fields: [Field; FIELDS_PER_BLOB],
kzg_commitment: [Field; 2],
kzg_commitment: BlobCommitment,
hashed_blobs_fields: Field,
) -> BlobPublicInputs {
let challenge_z: Field = compute_challenge(hashed_blobs_fields, kzg_commitment);
Expand All @@ -149,7 +149,7 @@ fn evaluate_blob(
// Evaluates each blob required for a block
pub fn evaluate_blobs(
blobs_as_fields: [Field; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
kzg_commitments: [[Field; 2]; BLOBS_PER_BLOCK],
kzg_commitments: [BlobCommitment; BLOBS_PER_BLOCK],
mut sponge_blob: SpongeBlob,
) -> BlockBlobPublicInputs {
// Note that with multiple blobs per block, each blob uses the same hashed_blobs_fields in:
Expand Down Expand Up @@ -414,6 +414,7 @@ mod tests {
barycentric_evaluate_blob_at_z, check_block_blob_sponge, evaluate_blob, evaluate_blobs,
field_to_bignum,
},
blob_public_inputs::BlobCommitment,
unconstrained_config::{
D, D_INV, F, FIELDS_CARRYING_AN_EXTRA_BIT_PER_BLOB, LOG_FIELDS_PER_BLOB,
NOIR_FIELDS_PER_BLOB,
Expand Down Expand Up @@ -456,7 +457,7 @@ mod tests {
let mut sponge_blob = SpongeBlob::new(blob_fields.len());
sponge_blob.absorb(blob_fields, blob_fields.len());

let kzg_commitment_in = [1, 2]; // this is made-up nonsense.
let kzg_commitment_in = BlobCommitment { inner: [1, 2] }; // this is made-up nonsense.
let padded_blob_fields = pad_end(blob, 0);
let hashed_blob = check_block_blob_sponge(padded_blob_fields, sponge_blob);
let output = evaluate_blob(blob, kzg_commitment_in, hashed_blob);
Expand Down Expand Up @@ -521,10 +522,12 @@ mod tests {
let mut sponge_blob = SpongeBlob::new(400);
sponge_blob.absorb(blob, 400);

let kzg_commitment_in = [
0x00b2803d5fe972914ba3616033e2748bbaa6dbcddefc3721a54895a7a45e7750,
0x0000000000000000000000000000004dd1a971c7e8d8292be943d05bccebcfea,
];
let kzg_commitment_in = BlobCommitment {
inner: [
0x00b2803d5fe972914ba3616033e2748bbaa6dbcddefc3721a54895a7a45e7750,
0x0000000000000000000000000000004dd1a971c7e8d8292be943d05bccebcfea,
],
};

let padded_blob_fields = pad_end(blob, 0);
let hashed_blob = check_block_blob_sponge(padded_blob_fields, sponge_blob);
Expand All @@ -551,10 +554,12 @@ mod tests {
let mut sponge_blob = SpongeBlob::new(FIELDS_PER_BLOB * BLOBS_PER_BLOCK);
sponge_blob.absorb(blob, FIELDS_PER_BLOB * BLOBS_PER_BLOCK);

let kzg_commitment_in = [
0x00ac771dea41e29fc2b7016c32731602c0812548ba0f491864a4e03fdb94b8d3,
0x000000000000000000000000000000d195faad1967cdf005acf73088b0e8474a,
];
let kzg_commitment_in = BlobCommitment {
inner: [
0x00ac771dea41e29fc2b7016c32731602c0812548ba0f491864a4e03fdb94b8d3,
0x000000000000000000000000000000d195faad1967cdf005acf73088b0e8474a,
],
};

let output = evaluate_blobs(blob, [kzg_commitment_in; BLOBS_PER_BLOCK], sponge_blob);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,29 @@ use types::{
utils::reader::Reader,
};

// NB: This only exists because a nested array of [[Field; 2]; N] did not build with earthly, but was fine otherwise
// For blobs, we use the compressed 48 byte BLS12 commitment to compute the challenge. We never need to operate on it,
// so it's encoded as 2 fields. The first is the first 31 bytes, and the second is the next 17 bytes.
pub struct BlobCommitment {
    // Two-field encoding of the 48-byte compressed commitment: inner[0] holds the first 31 bytes, inner[1] the remaining 17.
    pub inner: [Field; 2],
}

impl Eq for BlobCommitment {
    // Two commitments are equal exactly when both encoded field halves match.
    fn eq(self, other: Self) -> bool {
        self.inner == other.inner
    }
}

impl Empty for BlobCommitment {
    // The all-zero commitment, used as a placeholder before real blob data is assigned.
    fn empty() -> Self {
        BlobCommitment { inner: [0, 0] }
    }
}

pub struct BlobPublicInputs {
pub z: Field,
pub y: F,
pub kzg_commitment: [Field; 2],
pub kzg_commitment: BlobCommitment,
}

impl BlobPublicInputs {
Expand All @@ -25,13 +44,13 @@ impl BlobPublicInputs {
// This is not equivalent to being empty, since the challenge point z is a hash and won't have 0 value.
fn is_zero(self) -> bool {
// Note: there is no constrained is_zero in bignum
(self.y == BigNum { limbs: [0, 0, 0] }) & (self.kzg_commitment == [0, 0])
(self.y == BigNum { limbs: [0, 0, 0] }) & (self.kzg_commitment.inner == [0, 0])
}
}

impl Empty for BlobPublicInputs {
fn empty() -> Self {
Self { z: 0, y: BigNum::new(), kzg_commitment: [0; 2] }
Self { z: 0, y: BigNum::new(), kzg_commitment: BlobCommitment::empty() }
}
}

Expand All @@ -42,8 +61,8 @@ impl Serialize<BLOB_PUBLIC_INPUTS> for BlobPublicInputs {
self.y.limbs[0],
self.y.limbs[1],
self.y.limbs[2],
self.kzg_commitment[0],
self.kzg_commitment[1],
self.kzg_commitment.inner[0],
self.kzg_commitment.inner[1],
]
}
}
Expand All @@ -53,7 +72,7 @@ impl Deserialize<BLOB_PUBLIC_INPUTS> for BlobPublicInputs {
Self {
z: fields[0],
y: BigNum { limbs: [fields[1], fields[2], fields[3]] },
kzg_commitment: [fields[4], fields[5]],
kzg_commitment: BlobCommitment { inner: [fields[4], fields[5]] },
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use crate::{
},
components,
};
use blob::{blob::evaluate_blobs, blob_public_inputs::BlockBlobPublicInputs};
use blob::{blob::evaluate_blobs, blob_public_inputs::{BlobCommitment, BlockBlobPublicInputs}};
use parity_lib::root::root_rollup_parity_input::RootRollupParityInput;
use types::{
abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot,
Expand Down Expand Up @@ -49,9 +49,9 @@ pub struct BlockRootRollupInputs {
prover_id: Field,
// Fields for blob verification made of all tx effects in this block
// (will likely change to be accumulated and sent up to the final root)
blob_fields: [Field; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
// see blob/src/main.nr -> main() - this is used for creating the challenge z
blob_commitments: [[Field; 2]; BLOBS_PER_BLOCK],
blobs_fields: [Field; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
// see blob/src/main.nr -> main() - these are used for creating the challenge z
blob_commitments: [BlobCommitment; BLOBS_PER_BLOCK],
// Flat sha256 hash of the EVM blob hashes, can be injected here as the contract checks its validity vs the blob_public_inputs below
// NB: to fit it into a field, we truncate to 31 bytes
blobs_hash: Field,
Expand Down Expand Up @@ -132,7 +132,7 @@ impl BlockRootRollupInputs {

let mut blob_public_inputs = [BlockBlobPublicInputs::empty(); AZTEC_MAX_EPOCH_DURATION];
blob_public_inputs[0] = evaluate_blobs(
self.blob_fields,
self.blobs_fields,
self.blob_commitments,
right.end_sponge_blob,
);
Expand Down Expand Up @@ -166,8 +166,8 @@ impl Empty for BlockRootRollupInputs {
new_archive_sibling_path: [0; ARCHIVE_HEIGHT],
previous_block_hash: 0,
prover_id: 0,
blob_fields: [0; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
blob_commitments: [[0; 2]; BLOBS_PER_BLOCK],
blobs_fields: [0; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
blob_commitments: [BlobCommitment::empty(); BLOBS_PER_BLOCK],
blobs_hash: 0,
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,11 @@ mod tests {

let hashed_tx_effects = expected_sponge.squeeze();
let expected_z = Poseidon2::hash(
[hashed_tx_effects, inputs.blob_commitments[0][0], inputs.blob_commitments[0][1]],
[
hashed_tx_effects,
inputs.blob_commitments[0].inner[0],
inputs.blob_commitments[0].inner[1],
],
3,
);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@ pub fn default_block_root_rollup_inputs() -> BlockRootRollupInputs {

inputs.previous_rollup_data = default_previous_rollup_data();

inputs.blob_fields[0] = 1;
inputs.blob_fields[1] = 2;
inputs.blobs_fields[0] = 1;
inputs.blobs_fields[1] = 2;

inputs
}
20 changes: 16 additions & 4 deletions yarn-project/noir-protocol-circuits-types/src/type_conversion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@ import type {
BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir,
BaseParityInputs as BaseParityInputsNoir,
BigNum,
BlobCommitment as BlobCommitmentNoir,
BlobPublicInputs as BlobPublicInputsNoir,
BlockBlobPublicInputs as BlockBlobPublicInputsNoir,
BlockMergeRollupInputs as BlockMergeRollupInputsNoir,
Expand Down Expand Up @@ -2084,6 +2085,17 @@ export function mapSpongeBlobFromNoir(spongeBlob: SpongeBlobNoir): SpongeBlob {
);
}

/**
 * Converts a circuits.js blob commitment (a pair of fields) into its noir struct form.
 * @param commitment - The circuits.js commitment, encoded as two field elements.
 * @returns The equivalent noir `BlobCommitment` struct.
 */
export function mapBlobCommitmentToNoir(commitment: [Fr, Fr]): BlobCommitmentNoir {
  // Map each half of the two-field encoding explicitly rather than via mapTuple.
  const inner: BlobCommitmentNoir['inner'] = [mapFieldToNoir(commitment[0]), mapFieldToNoir(commitment[1])];
  return { inner };
}

/**
* Maps blob public inputs to noir.
* @param blobPublicInputs - The circuits.js blob public inputs.
Expand All @@ -2093,7 +2105,7 @@ export function mapBlobPublicInputsToNoir(blobPublicInputs: BlobPublicInputs): B
return {
z: mapFieldToNoir(blobPublicInputs.z),
y: mapBLS12BigNumToNoir(blobPublicInputs.y),
kzg_commitment: mapTuple(blobPublicInputs.kzgCommitment, mapFieldToNoir),
kzg_commitment: mapBlobCommitmentToNoir(blobPublicInputs.kzgCommitment),
};
}

Expand All @@ -2106,7 +2118,7 @@ export function mapBlobPublicInputsFromNoir(blobPublicInputs: BlobPublicInputsNo
return new BlobPublicInputs(
mapFieldFromNoir(blobPublicInputs.z),
mapBLS12BigNumFromNoir(blobPublicInputs.y),
mapTupleFromNoir(blobPublicInputs.kzg_commitment, 2, mapFieldFromNoir),
mapTupleFromNoir(blobPublicInputs.kzg_commitment.inner, 2, mapFieldFromNoir),
);
}

Expand Down Expand Up @@ -2429,8 +2441,8 @@ export function mapBlockRootRollupInputsToNoir(rootRollupInputs: BlockRootRollup
previous_block_hash: mapFieldToNoir(rootRollupInputs.previousBlockHash),
prover_id: mapFieldToNoir(rootRollupInputs.proverId),
// @ts-expect-error - below line gives error 'Type instantiation is excessively deep and possibly infinite. ts(2589)'
blob_fields: mapTuple(rootRollupInputs.blobFields, mapFieldToNoir),
blob_commitments: mapTuple(rootRollupInputs.blobCommitments, pair => mapTuple(pair, mapFieldToNoir)),
blobs_fields: mapTuple(rootRollupInputs.blobFields, mapFieldToNoir),
blob_commitments: mapTuple(rootRollupInputs.blobCommitments, mapBlobCommitmentToNoir),
blobs_hash: mapFieldToNoir(rootRollupInputs.blobsHash),
};
}
Expand Down

0 comments on commit 5d57b6d

Please sign in to comment.