From faa69b43f3d63eadea00cf4bdbc7d44c2eb71ec5 Mon Sep 17 00:00:00 2001
From: "mergify[bot]" <37929162+mergify[bot]@users.noreply.github.com>
Date: Mon, 26 Feb 2024 22:52:56 +0000
Subject: [PATCH] v1.18: uses struct instead of tuple for Merkle shreds variant (backport of #35303) (#35321)

uses struct instead of tuple for Merkle shreds variant (#35303)

Working towards adding a new Merkle shred variant with retransmitter's
signature, the commit uses struct instead of tuple to describe Merkle
shred variant.

(cherry picked from commit c8ee4f59ade88f8e5c097a6bc1569fd9a2d26c35)

Co-authored-by: behzad nouri
---
 ledger/src/shred.rs            | 252 ++++++++++++++++++++++++---------
 ledger/src/shred/common.rs     |   4 +-
 ledger/src/shred/merkle.rs     | 165 +++++++++++++++------
 ledger/src/shred/shred_data.rs |   6 +-
 4 files changed, 316 insertions(+), 111 deletions(-)

diff --git a/ledger/src/shred.rs b/ledger/src/shred.rs
index e3c896f71befa8..c2219c1370d47c 100644
--- a/ledger/src/shred.rs
+++ b/ledger/src/shred.rs
@@ -200,8 +200,8 @@ enum ShredVariant {
     // 0b0110_???? MerkleCode chained
     // 0b1000_???? MerkleData
     // 0b1001_???? MerkleData chained
-    MerkleCode(/*proof_size:*/ u8, /*chained:*/ bool), // 0b01?0_????
-    MerkleData(/*proof_size:*/ u8, /*chained:*/ bool), // 0b100?_????
+    MerkleCode { proof_size: u8, chained: bool }, // 0b01?0_????
+    MerkleData { proof_size: u8, chained: bool }, // 0b100?_????
 }
 
 /// A common header that is present in data and code shred headers
@@ -390,11 +390,11 @@ impl Shred {
                 let shred = legacy::ShredData::from_payload(shred)?;
                 Self::from(ShredData::from(shred))
             }
-            ShredVariant::MerkleCode(..) => {
+            ShredVariant::MerkleCode { .. } => {
                 let shred = merkle::ShredCode::from_payload(shred)?;
                 Self::from(ShredCode::from(shred))
             }
-            ShredVariant::MerkleData(..) => {
+            ShredVariant::MerkleData { .. } => {
                 let shred = merkle::ShredData::from_payload(shred)?;
                 Self::from(ShredData::from(shred))
             }
@@ -653,12 +653,18 @@ pub mod layout {
                 let chunk = shred.get(self::legacy::SIGNED_MESSAGE_OFFSETS)?;
                 SignedData::Chunk(chunk)
             }
-            ShredVariant::MerkleCode(proof_size, chained) => {
+            ShredVariant::MerkleCode {
+                proof_size,
+                chained,
+            } => {
                 let merkle_root =
                     self::merkle::ShredCode::get_merkle_root(shred, proof_size, chained)?;
                 SignedData::MerkleRoot(merkle_root)
             }
-            ShredVariant::MerkleData(proof_size, chained) => {
+            ShredVariant::MerkleData {
+                proof_size,
+                chained,
+            } => {
                 let merkle_root =
                     self::merkle::ShredData::get_merkle_root(shred, proof_size, chained)?;
                 SignedData::MerkleRoot(merkle_root)
@@ -677,8 +683,8 @@ pub mod layout {
             // Merkle shreds sign merkle tree root which can be recovered from
             // the merkle proof embedded in the payload but itself is not
             // stored the payload.
-            ShredVariant::MerkleCode(..) => None,
-            ShredVariant::MerkleData(..) => None,
+            ShredVariant::MerkleCode { .. } => None,
+            ShredVariant::MerkleData { .. } => None,
         }
     }
 
@@ -695,12 +701,14 @@ pub mod layout {
     pub fn get_merkle_root(shred: &[u8]) -> Option<Hash> {
         match get_shred_variant(shred).ok()? {
             ShredVariant::LegacyCode | ShredVariant::LegacyData => None,
-            ShredVariant::MerkleCode(proof_size, chained) => {
-                merkle::ShredCode::get_merkle_root(shred, proof_size, chained)
-            }
-            ShredVariant::MerkleData(proof_size, chained) => {
-                merkle::ShredData::get_merkle_root(shred, proof_size, chained)
-            }
+            ShredVariant::MerkleCode {
+                proof_size,
+                chained,
+            } => merkle::ShredCode::get_merkle_root(shred, proof_size, chained),
+            ShredVariant::MerkleData {
+                proof_size,
+                chained,
+            } => merkle::ShredData::get_merkle_root(shred, proof_size, chained),
         }
     }
 
@@ -719,9 +727,8 @@ pub mod layout {
         let shred = get_shred(packet).unwrap();
         let merkle_proof_size = match get_shred_variant(shred).unwrap() {
             ShredVariant::LegacyCode | ShredVariant::LegacyData => None,
-            ShredVariant::MerkleCode(proof_size, _) | ShredVariant::MerkleData(proof_size, _) => {
-                Some(proof_size)
-            }
+            ShredVariant::MerkleCode { proof_size, .. }
+            | ShredVariant::MerkleData { proof_size, .. } => Some(proof_size),
         };
         let coin_flip: bool = rng.gen();
         if coin_flip {
@@ -802,8 +809,8 @@ impl From<ShredVariant> for ShredType {
         match shred_variant {
             ShredVariant::LegacyCode => ShredType::Code,
             ShredVariant::LegacyData => ShredType::Data,
-            ShredVariant::MerkleCode(..) => ShredType::Code,
-            ShredVariant::MerkleData(..) => ShredType::Data,
+            ShredVariant::MerkleCode { .. } => ShredType::Code,
+            ShredVariant::MerkleData { .. } => ShredType::Data,
         }
     }
 }
@@ -813,10 +820,22 @@ impl From<ShredVariant> for u8 {
         match shred_variant {
             ShredVariant::LegacyCode => u8::from(ShredType::Code),
             ShredVariant::LegacyData => u8::from(ShredType::Data),
-            ShredVariant::MerkleCode(proof_size, false) => proof_size | 0x40,
-            ShredVariant::MerkleCode(proof_size, true) => proof_size | 0x60,
-            ShredVariant::MerkleData(proof_size, false) => proof_size | 0x80,
-            ShredVariant::MerkleData(proof_size, true) => proof_size | 0x90,
+            ShredVariant::MerkleCode {
+                proof_size,
+                chained: false,
+            } => proof_size | 0x40,
+            ShredVariant::MerkleCode {
+                proof_size,
+                chained: true,
+            } => proof_size | 0x60,
+            ShredVariant::MerkleData {
+                proof_size,
+                chained: false,
+            } => proof_size | 0x80,
+            ShredVariant::MerkleData {
+                proof_size,
+                chained: true,
+            } => proof_size | 0x90,
         }
     }
 }
@@ -831,14 +850,22 @@ impl TryFrom<u8> for ShredVariant {
         } else {
             let proof_size = shred_variant & 0x0F;
             match shred_variant & 0xF0 {
-                0x40 => Ok(ShredVariant::MerkleCode(
-                    proof_size, /*chained:*/ false,
-                )),
-                0x60 => Ok(ShredVariant::MerkleCode(proof_size, /*chained:*/ true)),
-                0x80 => Ok(ShredVariant::MerkleData(
-                    proof_size, /*chained:*/ false,
-                )),
-                0x90 => Ok(ShredVariant::MerkleData(proof_size, /*chained:*/ true)),
+                0x40 => Ok(ShredVariant::MerkleCode {
+                    proof_size,
+                    chained: false,
+                }),
+                0x60 => Ok(ShredVariant::MerkleCode {
+                    proof_size,
+                    chained: true,
+                }),
+                0x80 => Ok(ShredVariant::MerkleData {
+                    proof_size,
+                    chained: false,
+                }),
+                0x90 => Ok(ShredVariant::MerkleData {
+                    proof_size,
+                    chained: true,
+                }),
                 _ => Err(Error::InvalidShredVariant),
             }
         }
@@ -858,7 +885,7 @@ pub(crate) fn recover(
         ShredVariant::LegacyData | ShredVariant::LegacyCode => {
             Shredder::try_recovery(shreds, reed_solomon_cache)
         }
-        ShredVariant::MerkleCode(..) | ShredVariant::MerkleData(..) => {
+        ShredVariant::MerkleCode { .. } | ShredVariant::MerkleData { .. } => {
             let shreds = shreds
                 .into_iter()
                 .map(merkle::Shred::try_from)
@@ -996,20 +1023,20 @@ pub fn should_discard_shred(
                 return true;
             }
         }
-        ShredVariant::MerkleCode(_, /*chained:*/ false) => {
+        ShredVariant::MerkleCode { chained: false, .. } => {
             stats.num_shreds_merkle_code = stats.num_shreds_merkle_code.saturating_add(1);
         }
-        ShredVariant::MerkleCode(_, /*chained:*/ true) => {
+        ShredVariant::MerkleCode { chained: true, .. } => {
             if !enable_chained_merkle_shreds(slot) {
                 return true;
             }
             stats.num_shreds_merkle_code_chained =
                 stats.num_shreds_merkle_code_chained.saturating_add(1);
         }
-        ShredVariant::MerkleData(_, /*chained:*/ false) => {
+        ShredVariant::MerkleData { chained: false, .. } => {
             stats.num_shreds_merkle_data = stats.num_shreds_merkle_data.saturating_add(1);
         }
-        ShredVariant::MerkleData(_, /*chained:*/ true) => {
+        ShredVariant::MerkleData { chained: true, .. } => {
             if !enable_chained_merkle_shreds(slot) {
                 return true;
             }
@@ -1133,8 +1160,11 @@ mod tests {
         );
         assert_eq!(
             SIZE_OF_SHRED_VARIANT,
-            bincode::serialized_size(&ShredVariant::MerkleCode(15, /*chained:*/ true)).unwrap()
-                as usize
+            bincode::serialized_size(&ShredVariant::MerkleCode {
+                proof_size: 15,
+                chained: true,
+            })
+            .unwrap() as usize
         );
         assert_eq!(
             SIZE_OF_SHRED_SLOT,
@@ -1438,114 +1468,204 @@ mod tests {
         );
         // Merkle coding shred.
         assert_eq!(
-            u8::from(ShredVariant::MerkleCode(5, /*chained:*/ false)),
+            u8::from(ShredVariant::MerkleCode {
+                proof_size: 5,
+                chained: false,
+            }),
             0b0100_0101
         );
         assert_eq!(
-            u8::from(ShredVariant::MerkleCode(5, /*chained:*/ true)),
+            u8::from(ShredVariant::MerkleCode {
+                proof_size: 5,
+                chained: true,
+            }),
             0b0110_0101
         );
         for chained in [false, true] {
             assert_eq!(
-                ShredType::from(ShredVariant::MerkleCode(5, chained)),
+                ShredType::from(ShredVariant::MerkleCode {
+                    proof_size: 5,
+                    chained,
+                }),
                 ShredType::Code
            );
         }
         assert_matches!(
             ShredVariant::try_from(0b0100_0101),
-            Ok(ShredVariant::MerkleCode(5, /*chained:*/ false))
+            Ok(ShredVariant::MerkleCode {
+                proof_size: 5,
+                chained: false,
+            })
         );
         assert_matches!(
             ShredVariant::try_from(0b0110_0101),
-            Ok(ShredVariant::MerkleCode(5, /*chained:*/ true))
+            Ok(ShredVariant::MerkleCode {
+                proof_size: 5,
+                chained: true,
+            })
         );
-        let buf = bincode::serialize(&ShredVariant::MerkleCode(5, /*chained:*/ false)).unwrap();
+        let buf = bincode::serialize(&ShredVariant::MerkleCode {
+            proof_size: 5,
+            chained: false,
+        })
+        .unwrap();
         assert_eq!(buf, vec![0b0100_0101]);
         assert_matches!(
             bincode::deserialize::<ShredVariant>(&[0b0100_0101]),
-            Ok(ShredVariant::MerkleCode(5, /*chained:*/ false))
+            Ok(ShredVariant::MerkleCode {
+                proof_size: 5,
+                chained: false,
+            })
         );
-        let buf = bincode::serialize(&ShredVariant::MerkleCode(5, /*chained:*/ true)).unwrap();
+        let buf = bincode::serialize(&ShredVariant::MerkleCode {
+            proof_size: 5,
+            chained: true,
+        })
+        .unwrap();
         assert_eq!(buf, vec![0b0110_0101]);
         assert_matches!(
             bincode::deserialize::<ShredVariant>(&[0b0110_0101]),
-            Ok(ShredVariant::MerkleCode(5, /*chained:*/ true))
+            Ok(ShredVariant::MerkleCode {
+                proof_size: 5,
+                chained: true,
+            })
        );
         for (proof_size, chained) in iproduct!(0..=15u8, [false, true]) {
             let byte = proof_size | if chained { 0b0110_0000 } else { 0b0100_0000 };
             assert_eq!(
-                u8::from(ShredVariant::MerkleCode(proof_size, chained)),
+                u8::from(ShredVariant::MerkleCode {
+                    proof_size,
+                    chained,
+                }),
                 byte
             );
             assert_eq!(
-                ShredType::from(ShredVariant::MerkleCode(proof_size, chained)),
+                ShredType::from(ShredVariant::MerkleCode {
+                    proof_size,
+                    chained,
+                }),
                 ShredType::Code
             );
             assert_eq!(
                 ShredVariant::try_from(byte).unwrap(),
-                ShredVariant::MerkleCode(proof_size, chained)
+                ShredVariant::MerkleCode {
+                    proof_size,
+                    chained,
+                },
             );
-            let buf = bincode::serialize(&ShredVariant::MerkleCode(proof_size, chained)).unwrap();
+            let buf = bincode::serialize(&ShredVariant::MerkleCode {
+                proof_size,
+                chained,
+            })
+            .unwrap();
             assert_eq!(buf, vec![byte]);
             assert_eq!(
                 bincode::deserialize::<ShredVariant>(&[byte]).unwrap(),
-                ShredVariant::MerkleCode(proof_size, chained)
+                ShredVariant::MerkleCode {
+                    proof_size,
+                    chained,
+                }
             );
         }
         // Merkle data shred.
         assert_eq!(
-            u8::from(ShredVariant::MerkleData(10, /*chained:*/ false)),
+            u8::from(ShredVariant::MerkleData {
+                proof_size: 10,
+                chained: false,
+            }),
             0b1000_1010
         );
         assert_eq!(
-            u8::from(ShredVariant::MerkleData(10, /*chained:*/ true)),
+            u8::from(ShredVariant::MerkleData {
+                proof_size: 10,
+                chained: true,
+            }),
             0b1001_1010
         );
         for chained in [false, true] {
             assert_eq!(
-                ShredType::from(ShredVariant::MerkleData(10, chained)),
+                ShredType::from(ShredVariant::MerkleData {
+                    proof_size: 10,
+                    chained,
+                }),
                 ShredType::Data
             );
         }
         assert_matches!(
             ShredVariant::try_from(0b1000_1010),
-            Ok(ShredVariant::MerkleData(10, /*chained:*/ false))
+            Ok(ShredVariant::MerkleData {
+                proof_size: 10,
+                chained: false,
+            })
         );
         assert_matches!(
             ShredVariant::try_from(0b1001_1010),
-            Ok(ShredVariant::MerkleData(10, /*chained:*/ true))
+            Ok(ShredVariant::MerkleData {
+                proof_size: 10,
+                chained: true,
+            })
        );
-        let buf = bincode::serialize(&ShredVariant::MerkleData(10, /*chained:*/ false)).unwrap();
+        let buf = bincode::serialize(&ShredVariant::MerkleData {
+            proof_size: 10,
+            chained: false,
+        })
+        .unwrap();
         assert_eq!(buf, vec![0b1000_1010]);
         assert_matches!(
             bincode::deserialize::<ShredVariant>(&[0b1000_1010]),
-            Ok(ShredVariant::MerkleData(10, /*chained:*/ false))
+            Ok(ShredVariant::MerkleData {
+                proof_size: 10,
+                chained: false,
+            })
         );
-        let buf = bincode::serialize(&ShredVariant::MerkleData(10, /*chained:*/ true)).unwrap();
+        let buf = bincode::serialize(&ShredVariant::MerkleData {
+            proof_size: 10,
+            chained: true,
+        })
+        .unwrap();
         assert_eq!(buf, vec![0b1001_1010]);
         assert_matches!(
             bincode::deserialize::<ShredVariant>(&[0b1001_1010]),
-            Ok(ShredVariant::MerkleData(10, /*chained:*/ true))
+            Ok(ShredVariant::MerkleData {
+                proof_size: 10,
+                chained: true,
+            })
         );
         for (proof_size, chained) in iproduct!(0..=15u8, [false, true]) {
             let byte = proof_size | if chained { 0b1001_0000 } else { 0b1000_0000 };
             assert_eq!(
-                u8::from(ShredVariant::MerkleData(proof_size, chained)),
+                u8::from(ShredVariant::MerkleData {
+                    proof_size,
+                    chained,
+                }),
                 byte
             );
             assert_eq!(
-                ShredType::from(ShredVariant::MerkleData(proof_size, chained)),
+                ShredType::from(ShredVariant::MerkleData {
+                    proof_size,
+                    chained,
+                }),
                 ShredType::Data
             );
             assert_eq!(
                 ShredVariant::try_from(byte).unwrap(),
-                ShredVariant::MerkleData(proof_size, chained)
+                ShredVariant::MerkleData {
+                    proof_size,
+                    chained,
+                }
             );
-            let buf = bincode::serialize(&ShredVariant::MerkleData(proof_size, chained)).unwrap();
+            let buf = bincode::serialize(&ShredVariant::MerkleData {
+                proof_size,
+                chained,
+            })
+            .unwrap();
             assert_eq!(buf, vec![byte]);
             assert_eq!(
                 bincode::deserialize::<ShredVariant>(&[byte]).unwrap(),
-                ShredVariant::MerkleData(proof_size, chained)
+                ShredVariant::MerkleData {
+                    proof_size,
+                    chained,
+                }
             );
         }
     }
diff --git a/ledger/src/shred/common.rs b/ledger/src/shred/common.rs
index 64b4c775469a24..af05532a3e361c 100644
--- a/ledger/src/shred/common.rs
+++ b/ledger/src/shred/common.rs
@@ -56,7 +56,7 @@ macro_rules! impl_shred_common {
                     self.common_header.index = index;
                     bincode::serialize_into(&mut self.payload[..], &self.common_header).unwrap();
                 }
-                ShredVariant::MerkleCode(..) | ShredVariant::MerkleData(..) => {
+                ShredVariant::MerkleCode { .. } | ShredVariant::MerkleData { .. } => {
                     panic!("Not Implemented!");
                 }
             }
@@ -69,7 +69,7 @@ macro_rules! impl_shred_common {
                     self.common_header.slot = slot;
                     bincode::serialize_into(&mut self.payload[..], &self.common_header).unwrap();
                 }
-                ShredVariant::MerkleCode(..) | ShredVariant::MerkleData(..) => {
+                ShredVariant::MerkleCode { .. } | ShredVariant::MerkleData { .. } => {
                     panic!("Not Implemented!");
                 }
             }
diff --git a/ledger/src/shred/merkle.rs b/ledger/src/shred/merkle.rs
index ebc4a711b8c774..f92c3616f5c86e 100644
--- a/ledger/src/shred/merkle.rs
+++ b/ledger/src/shred/merkle.rs
@@ -114,8 +114,8 @@ impl Shred {
     fn from_payload(shred: Vec<u8>) -> Result<Self, Error> {
         match shred::layout::get_shred_variant(&shred)? {
             ShredVariant::LegacyCode | ShredVariant::LegacyData => Err(Error::InvalidShredVariant),
-            ShredVariant::MerkleCode(..) => Ok(Self::ShredCode(ShredCode::from_payload(shred)?)),
-            ShredVariant::MerkleData(..) => Ok(Self::ShredData(ShredData::from_payload(shred)?)),
+            ShredVariant::MerkleCode { .. } => Ok(Self::ShredCode(ShredCode::from_payload(shred)?)),
+            ShredVariant::MerkleData { .. } => Ok(Self::ShredData(ShredData::from_payload(shred)?)),
         }
     }
@@ -138,7 +138,7 @@ impl ShredData {
     // proof_size is the number of merkle proof entries.
     fn proof_size(&self) -> Result<u8, Error> {
         match self.common_header.shred_variant {
-            ShredVariant::MerkleData(proof_size, _) => Ok(proof_size),
+            ShredVariant::MerkleData { proof_size, .. } => Ok(proof_size),
             _ => Err(Error::InvalidShredVariant),
         }
     }
@@ -160,7 +160,11 @@ impl ShredData {
 
     // Where the merkle proof starts in the shred binary.
     fn proof_offset(&self) -> Result<usize, Error> {
-        let ShredVariant::MerkleData(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         Self::get_proof_offset(proof_size, chained)
@@ -173,8 +177,10 @@ impl ShredData {
     }
 
     fn chained_merkle_root_offset(&self) -> Result<usize, Error> {
-        let ShredVariant::MerkleData(proof_size, /*chained:*/ true) =
-            self.common_header.shred_variant
+        let ShredVariant::MerkleData {
+            proof_size,
+            chained: true,
+        } = self.common_header.shred_variant
         else {
             return Err(Error::InvalidShredVariant);
         };
@@ -225,7 +231,11 @@ impl ShredData {
         // Deserialize headers.
         let mut cursor = Cursor::new(&shard[..]);
         let common_header: ShredCommonHeader = deserialize_from_with_limit(&mut cursor)?;
-        let ShredVariant::MerkleData(proof_size, chained) = common_header.shred_variant else {
+        let ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        } = common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         if ShredCode::capacity(proof_size, chained)? != shard_size {
@@ -264,7 +274,10 @@ impl ShredData {
     pub(super) fn get_merkle_root(shred: &[u8], proof_size: u8, chained: bool) -> Option<Hash> {
         debug_assert_eq!(
             shred::layout::get_shred_variant(shred).unwrap(),
-            ShredVariant::MerkleData(proof_size, chained)
+            ShredVariant::MerkleData {
+                proof_size,
+                chained,
+            },
         );
         // Shred index in the erasure batch.
         let index = {
@@ -287,7 +300,7 @@ impl ShredCode {
     // proof_size is the number of merkle proof entries.
     fn proof_size(&self) -> Result<u8, Error> {
         match self.common_header.shred_variant {
-            ShredVariant::MerkleCode(proof_size, _) => Ok(proof_size),
+            ShredVariant::MerkleCode { proof_size, .. } => Ok(proof_size),
             _ => Err(Error::InvalidShredVariant),
         }
     }
@@ -307,7 +320,11 @@ impl ShredCode {
 
     // Where the merkle proof starts in the shred binary.
     fn proof_offset(&self) -> Result<usize, Error> {
-        let ShredVariant::MerkleCode(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleCode {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         Self::get_proof_offset(proof_size, chained)
@@ -320,8 +337,10 @@ impl ShredCode {
     }
 
     fn chained_merkle_root_offset(&self) -> Result<usize, Error> {
-        let ShredVariant::MerkleCode(proof_size, /*chained:*/ true) =
-            self.common_header.shred_variant
+        let ShredVariant::MerkleCode {
+            proof_size,
+            chained: true,
+        } = self.common_header.shred_variant
         else {
             return Err(Error::InvalidShredVariant);
         };
@@ -371,7 +390,11 @@ impl ShredCode {
         chained_merkle_root: &Option<Hash>,
         mut shard: Vec<u8>,
     ) -> Result<Self, Error> {
-        let ShredVariant::MerkleCode(proof_size, chained) = common_header.shred_variant else {
+        let ShredVariant::MerkleCode {
+            proof_size,
+            chained,
+        } = common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         let shard_size = shard.len();
@@ -418,7 +441,10 @@ impl ShredCode {
     pub(super) fn get_merkle_root(shred: &[u8], proof_size: u8, chained: bool) -> Option<Hash> {
         debug_assert_eq!(
             shred::layout::get_shred_variant(shred).unwrap(),
-            ShredVariant::MerkleCode(proof_size, chained)
+            ShredVariant::MerkleCode {
+                proof_size,
+                chained,
+            },
         );
         // Shred index in the erasure batch.
         let index = {
@@ -461,7 +487,7 @@ impl<'a> ShredTrait<'a> for ShredData {
         payload.truncate(Self::SIZE_OF_PAYLOAD);
         let mut cursor = Cursor::new(&payload[..]);
         let common_header: ShredCommonHeader = deserialize_from_with_limit(&mut cursor)?;
-        if !matches!(common_header.shred_variant, ShredVariant::MerkleData(..)) {
+        if !matches!(common_header.shred_variant, ShredVariant::MerkleData { .. }) {
             return Err(Error::InvalidShredVariant);
         }
         let data_header = deserialize_from_with_limit(&mut cursor)?;
@@ -485,7 +511,11 @@ impl<'a> ShredTrait<'a> for ShredData {
         if self.payload.len() != Self::SIZE_OF_PAYLOAD {
             return Err(Error::InvalidPayloadSize(self.payload.len()));
         }
-        let ShredVariant::MerkleData(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         let offset = Self::SIZE_OF_HEADERS + Self::capacity(proof_size, chained)?;
@@ -499,7 +529,11 @@ impl<'a> ShredTrait<'a> for ShredData {
         if self.payload.len() != Self::SIZE_OF_PAYLOAD {
             return Err(Error::InvalidPayloadSize(self.payload.len()));
         }
-        let ShredVariant::MerkleData(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         let offset = Self::SIZE_OF_HEADERS + Self::capacity(proof_size, chained)?;
@@ -510,7 +544,7 @@ impl<'a> ShredTrait<'a> for ShredData {
 
     fn sanitize(&self) -> Result<(), Error> {
         let shred_variant = self.common_header.shred_variant;
-        if !matches!(shred_variant, ShredVariant::MerkleData(..)) {
+        if !matches!(shred_variant, ShredVariant::MerkleData { .. }) {
             return Err(Error::InvalidShredVariant);
         }
         let _ = self.merkle_proof()?;
@@ -532,7 +566,7 @@ impl<'a> ShredTrait<'a> for ShredCode {
     fn from_payload(mut payload: Vec<u8>) -> Result<Self, Error> {
         let mut cursor = Cursor::new(&payload[..]);
         let common_header: ShredCommonHeader = deserialize_from_with_limit(&mut cursor)?;
-        if !matches!(common_header.shred_variant, ShredVariant::MerkleCode(..)) {
+        if !matches!(common_header.shred_variant, ShredVariant::MerkleCode { .. }) {
             return Err(Error::InvalidShredVariant);
         }
         let coding_header = deserialize_from_with_limit(&mut cursor)?;
@@ -561,7 +595,11 @@ impl<'a> ShredTrait<'a> for ShredCode {
         if self.payload.len() != Self::SIZE_OF_PAYLOAD {
             return Err(Error::InvalidPayloadSize(self.payload.len()));
         }
-        let ShredVariant::MerkleCode(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleCode {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         let offset = Self::SIZE_OF_HEADERS + Self::capacity(proof_size, chained)?;
@@ -575,7 +613,11 @@ impl<'a> ShredTrait<'a> for ShredCode {
         if self.payload.len() != Self::SIZE_OF_PAYLOAD {
             return Err(Error::InvalidPayloadSize(self.payload.len()));
         }
-        let ShredVariant::MerkleCode(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleCode {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         let offset = Self::SIZE_OF_HEADERS + Self::capacity(proof_size, chained)?;
@@ -586,7 +628,7 @@ impl<'a> ShredTrait<'a> for ShredCode {
 
     fn sanitize(&self) -> Result<(), Error> {
         let shred_variant = self.common_header.shred_variant;
-        if !matches!(shred_variant, ShredVariant::MerkleCode(..)) {
+        if !matches!(shred_variant, ShredVariant::MerkleCode { .. }) {
             return Err(Error::InvalidShredVariant);
         }
         let _ = self.merkle_proof()?;
@@ -605,7 +647,11 @@ impl ShredDataTrait for ShredData {
     }
 
     fn data(&self) -> Result<&[u8], Error> {
-        let ShredVariant::MerkleData(proof_size, chained) = self.common_header.shred_variant else {
+        let ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        } = self.common_header.shred_variant
+        else {
             return Err(Error::InvalidShredVariant);
         };
         let data_buffer_size = Self::capacity(proof_size, chained)?;
@@ -739,10 +785,13 @@ pub(super) fn recover(
             Some((common_header, coding_header, chained_merkle_root))
         })
         .ok_or(TooFewParityShards)?;
-    debug_assert_matches!(common_header.shred_variant, ShredVariant::MerkleCode(..));
+    debug_assert_matches!(common_header.shred_variant, ShredVariant::MerkleCode { .. });
     let (proof_size, chained) = match common_header.shred_variant {
-        ShredVariant::MerkleCode(proof_size, chained) => (proof_size, chained),
-        ShredVariant::MerkleData(..) | ShredVariant::LegacyCode | ShredVariant::LegacyData => {
+        ShredVariant::MerkleCode {
+            proof_size,
+            chained,
+        } => (proof_size, chained),
+        ShredVariant::MerkleData { .. } | ShredVariant::LegacyCode | ShredVariant::LegacyData => {
             return Err(Error::InvalidShredVariant);
         }
     };
@@ -763,7 +812,11 @@ pub(super) fn recover(
                 && fec_set_index == &common_header.fec_set_index
                 && match shred {
                     Shred::ShredData(_) => {
-                        shred_variant == &ShredVariant::MerkleData(proof_size, chained)
+                        shred_variant
+                            == &ShredVariant::MerkleData {
+                                proof_size,
+                                chained,
+                            }
                     }
                     Shred::ShredCode(shred) => {
                         let CodingShredHeader {
@@ -771,7 +824,11 @@ pub(super) fn recover(
                             num_coding_shreds,
                             position: _,
                         } = shred.coding_header;
-                        shred_variant == &ShredVariant::MerkleCode(proof_size, chained)
+                        shred_variant
+                            == &ShredVariant::MerkleCode {
+                                proof_size,
+                                chained,
+                            }
                             && num_data_shreds == coding_header.num_data_shreds
                             && num_coding_shreds == coding_header.num_coding_shreds
                     }
@@ -824,7 +881,11 @@ pub(super) fn recover(
             version,
             fec_set_index,
         } = shred.common_header;
-        if shred_variant != ShredVariant::MerkleData(proof_size, chained)
+        let expected_shred_variant = ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        };
+        if shred_variant != expected_shred_variant
             || common_header.slot != slot
             || common_header.version != version
             || common_header.fec_set_index != fec_set_index
@@ -938,7 +999,10 @@ pub(super) fn make_shreds_from_data(
     let chunk_size = DATA_SHREDS_PER_FEC_BLOCK * data_buffer_size;
     let mut common_header = ShredCommonHeader {
         signature: Signature::default(),
-        shred_variant: ShredVariant::MerkleData(proof_size, chained),
+        shred_variant: ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        },
         slot,
         index: next_shred_index,
         version: shred_version,
@@ -989,7 +1053,10 @@ pub(super) fn make_shreds_from_data(
                     .then_some((proof_size, data_buffer_size))
             })
             .ok_or(Error::UnknownProofSize)?;
-        common_header.shred_variant = ShredVariant::MerkleData(proof_size, chained);
+        common_header.shred_variant = ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        };
         common_header.fec_set_index = common_header.index;
         let chunks = if data.is_empty() {
             // Generate one data shred with empty data.
@@ -1132,10 +1199,11 @@ fn make_erasure_batch(
     let erasure_batch_size = shredder::get_erasure_batch_size(num_data_shreds, is_last_in_slot);
     let num_coding_shreds = erasure_batch_size - num_data_shreds;
     let proof_size = get_proof_size(erasure_batch_size);
-    debug_assert!(shreds
-        .iter()
-        .all(|shred| shred.common_header.shred_variant
-            == ShredVariant::MerkleData(proof_size, chained)));
+    debug_assert!(shreds.iter().all(|shred| shred.common_header.shred_variant
+        == ShredVariant::MerkleData {
+            proof_size,
+            chained,
+        }));
     let mut common_header = match shreds.first() {
         None => return Err(Error::from(TooFewShards)),
         Some(shred) => shred.common_header,
     };
@@ -1159,7 +1227,10 @@ fn make_erasure_batch(
     let mut shreds: Vec<_> = shreds.into_iter().map(Shred::ShredData).collect();
     // Initialize coding shreds from erasure coding shards.
     common_header.index = next_code_index;
-    common_header.shred_variant = ShredVariant::MerkleCode(proof_size, chained);
+    common_header.shred_variant = ShredVariant::MerkleCode {
+        proof_size,
+        chained,
+    };
     let mut coding_header = CodingShredHeader {
         num_data_shreds: num_data_shreds as u16,
         num_coding_shreds: num_coding_shreds as u16,
@@ -1356,7 +1427,10 @@ mod test {
         let capacity = ShredData::capacity(proof_size, chained).unwrap();
         let common_header = ShredCommonHeader {
             signature: Signature::default(),
-            shred_variant: ShredVariant::MerkleData(proof_size, chained),
+            shred_variant: ShredVariant::MerkleData {
+                proof_size,
+                chained,
+            },
             slot: 145_865_705,
             index: 1835,
             version: rng.gen(),
@@ -1411,7 +1485,10 @@ mod test {
             .unwrap();
         for (i, code) in parity.into_iter().enumerate() {
             let common_header = ShredCommonHeader {
-                shred_variant: ShredVariant::MerkleCode(proof_size, chained),
+                shred_variant: ShredVariant::MerkleCode {
+                    proof_size,
+                    chained,
+                },
                 index: common_header.index + i as u32 + 7,
                 ..common_header
             };
@@ -1457,7 +1534,7 @@ mod test {
         if shreds.iter().all(|shred| {
             matches!(
                 shred.common_header().shred_variant,
-                ShredVariant::MerkleData(..)
+                ShredVariant::MerkleData { .. }
             )
         }) {
            assert_matches!(
@@ -1672,7 +1749,10 @@ mod test {
             assert_eq!(common_header.index, next_code_index + num_coding_shreds);
             assert_eq!(
                 common_header.shred_variant,
-                ShredVariant::MerkleCode(proof_size, chained)
+                ShredVariant::MerkleCode {
+                    proof_size,
+                    chained,
+                }
             );
             num_coding_shreds += 1;
         }
@@ -1680,7 +1760,10 @@ mod test {
             assert_eq!(common_header.index, next_shred_index + num_data_shreds);
             assert_eq!(
                 common_header.shred_variant,
-                ShredVariant::MerkleData(proof_size, chained)
+                ShredVariant::MerkleData {
+                    proof_size,
+                    chained,
+                }
             );
             assert!(common_header.fec_set_index <= common_header.index);
             assert_eq!(
diff --git a/ledger/src/shred/shred_data.rs b/ledger/src/shred/shred_data.rs
index 5b9965afd787c8..725ec90e65a14c 100644
--- a/ledger/src/shred/shred_data.rs
+++ b/ledger/src/shred/shred_data.rs
@@ -97,8 +97,10 @@ impl ShredData {
     // Possibly zero pads bytes stored in blockstore.
     pub(crate) fn resize_stored_shred(shred: Vec<u8>) -> Result<Vec<u8>, Error> {
         match shred::layout::get_shred_variant(&shred)? {
-            ShredVariant::LegacyCode | ShredVariant::MerkleCode(..) => Err(Error::InvalidShredType),
-            ShredVariant::MerkleData(..) => {
+            ShredVariant::LegacyCode | ShredVariant::MerkleCode { .. } => {
+                Err(Error::InvalidShredType)
+            }
+            ShredVariant::MerkleData { .. } => {
                 if shred.len() != merkle::ShredData::SIZE_OF_PAYLOAD {
                     return Err(Error::InvalidPayloadSize(shred.len()));
                 }