Reject storage proofs with unused nodes: begin (paritytech#1878)
* reject storage proofs with unused nodes: begin

* fix ignores_parachain_head_if_it_is_missing_from_storage_proof

* message_proof_is_rejected_if_it_has_duplicate_trie_nodes && message_proof_is_rejected_if_it_has_unused_trie_nodes

* proof_with_duplicate_items_is_rejected and proof_with_unused_items_is_rejected

* clippy

* fix benchmarks compilation

* impl From<Error> for &'static str

* fix review comments

* added comment
svyatonik authored Feb 15, 2023
1 parent 1ead35c commit 5279fc9
Showing 11 changed files with 217 additions and 86 deletions.
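The core change in this commit is that `bp_runtime::StorageProofChecker` now tracks which trie nodes a verifier actually touches: every `read_value` marks nodes as used (hence the new `&mut self` / `mut storage` receivers throughout the diff), and callers must finish with `ensure_no_unused_nodes()`, which fails with `StorageProofError::UnusedNodesInTheProof`; duplicate nodes are rejected up front with `DuplicateNodesInProof`. The sketch below is a deliberately simplified, self-contained model of that idea — a plain map instead of a real trie, and illustrative names — not the actual `bp_runtime` implementation.

```rust
use std::collections::{BTreeMap, BTreeSet};

/// Simplified stand-ins for the error variants introduced by this commit.
#[derive(Debug, PartialEq)]
enum StorageProofError {
    DuplicateNodesInProof,
    UnusedNodesInTheProof,
}

/// Toy checker: remembers which proof entries were read, then refuses proofs
/// that carry entries nobody needed. The real checker works over trie nodes.
struct ProofChecker {
    entries: BTreeMap<Vec<u8>, Vec<u8>>,
    used: BTreeSet<Vec<u8>>,
}

impl ProofChecker {
    fn new(raw_proof: Vec<(Vec<u8>, Vec<u8>)>) -> Result<Self, StorageProofError> {
        let mut entries = BTreeMap::new();
        for (key, value) in raw_proof {
            // duplicate nodes are rejected up front
            if entries.insert(key, value).is_some() {
                return Err(StorageProofError::DuplicateNodesInProof)
            }
        }
        Ok(ProofChecker { entries, used: BTreeSet::new() })
    }

    /// Every successful read marks the touched entry as used (this is why the
    /// real `read_value` now needs `&mut self`).
    fn read_value(&mut self, key: &[u8]) -> Option<Vec<u8>> {
        let value = self.entries.get(key).cloned();
        if value.is_some() {
            self.used.insert(key.to_vec());
        }
        value
    }

    /// Called once all expected reads are done: any leftover entry means the
    /// relayer shipped data the verifier never needed, so the proof is rejected.
    fn ensure_no_unused_nodes(self) -> Result<(), StorageProofError> {
        if self.used.len() != self.entries.len() {
            return Err(StorageProofError::UnusedNodesInTheProof)
        }
        Ok(())
    }
}

fn main() -> Result<(), StorageProofError> {
    let mut checker = ProofChecker::new(vec![(b"lane".to_vec(), b"state".to_vec())])?;
    assert!(checker.read_value(b"lane").is_some());
    checker.ensure_no_unused_nodes() // passes: every entry was read
}
```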
68 changes: 55 additions & 13 deletions bin/runtime-common/src/messages.rs
@@ -20,6 +20,8 @@
//! pallet is used to dispatch incoming messages. A message is identified by a tuple
//! of two elements - message lane id and message nonce.

pub use bp_runtime::{UnderlyingChainOf, UnderlyingChainProvider};

use bp_header_chain::{HeaderChain, HeaderChainError};
use bp_messages::{
source_chain::{LaneMessageVerifier, TargetHeaderChain},
@@ -28,14 +30,15 @@ use bp_messages::{
},
InboundLaneData, LaneId, Message, MessageKey, MessageNonce, MessagePayload, OutboundLaneData,
};
use bp_runtime::{messages::MessageDispatchResult, Chain, ChainId, Size, StorageProofChecker};
pub use bp_runtime::{UnderlyingChainOf, UnderlyingChainProvider};
use bp_runtime::{
messages::MessageDispatchResult, Chain, ChainId, RawStorageProof, Size, StorageProofChecker,
StorageProofError,
};
use codec::{Decode, DecodeLimit, Encode};
use frame_support::{traits::Get, weights::Weight, RuntimeDebug};
use hash_db::Hasher;
use scale_info::TypeInfo;
use sp_std::{convert::TryFrom, fmt::Debug, marker::PhantomData, vec::Vec};
use sp_trie::StorageProof;
use xcm::latest::prelude::*;

/// Bidirectional message bridge.
@@ -97,9 +100,6 @@ pub type OriginOf<C> = <C as ThisChainWithMessages>::RuntimeOrigin;
/// Type of call that is used on this chain.
pub type CallOf<C> = <C as ThisChainWithMessages>::RuntimeCall;

/// Raw storage proof type (just raw trie nodes).
pub type RawStorageProof = Vec<Vec<u8>>;

/// Sub-module that is declaring types required for processing This -> Bridged chain messages.
pub mod source {
use super::*;
@@ -274,8 +274,8 @@ pub mod source {
proof;
B::BridgedHeaderChain::parse_finalized_storage_proof(
bridged_header_hash,
StorageProof::new(storage_proof),
|storage| {
storage_proof,
|mut storage| {
// Messages delivery proof is just proof of single storage key read => any error
// is fatal.
let storage_inbound_lane_data_key =
@@ -290,6 +290,11 @@
let inbound_lane_data = InboundLaneData::decode(&mut &raw_inbound_lane_data[..])
.map_err(|_| "Failed to decode inbound lane state from the proof")?;

// check that the storage proof doesn't have any untouched trie nodes
storage
.ensure_no_unused_nodes()
.map_err(|_| "Messages delivery proof has unused trie nodes")?;

Ok((lane, inbound_lane_data))
},
)
@@ -608,9 +613,9 @@ pub mod target {

B::BridgedHeaderChain::parse_finalized_storage_proof(
bridged_header_hash,
StorageProof::new(storage_proof),
storage_proof,
|storage| {
let parser =
let mut parser =
StorageProofCheckerAdapter::<_, B> { storage, _dummy: Default::default() };

// receiving proofs where end < begin is ok (if proof includes outbound lane state)
@@ -661,6 +666,12 @@
return Err(MessageProofError::Empty)
}

// check that the storage proof doesn't have any untouched trie nodes
parser
.storage
.ensure_no_unused_nodes()
.map_err(MessageProofError::StorageProof)?;

// We only support single lane messages in this generated_schema
let mut proved_messages = ProvedMessages::new();
proved_messages.insert(lane, proved_lane_messages);
@@ -686,6 +697,8 @@
FailedToDecodeMessage,
/// Failed to decode outbound lane data from the proof.
FailedToDecodeOutboundLaneState,
/// Storage proof related error.
StorageProof(StorageProofError),
}

impl From<MessageProofError> for &'static str {
@@ -700,6 +713,7 @@ pub mod target {
"Failed to decode message from the proof",
MessageProofError::FailedToDecodeOutboundLaneState =>
"Failed to decode outbound lane data from the proof",
MessageProofError::StorageProof(_) => "Invalid storage proof",
}
}
}
@@ -710,15 +724,15 @@ pub mod target {
}

impl<H: Hasher, B: MessageBridge> StorageProofCheckerAdapter<H, B> {
fn read_raw_outbound_lane_data(&self, lane_id: &LaneId) -> Option<Vec<u8>> {
fn read_raw_outbound_lane_data(&mut self, lane_id: &LaneId) -> Option<Vec<u8>> {
let storage_outbound_lane_data_key = bp_messages::storage_keys::outbound_lane_data_key(
B::BRIDGED_MESSAGES_PALLET_NAME,
lane_id,
);
self.storage.read_value(storage_outbound_lane_data_key.0.as_ref()).ok()?
}

fn read_raw_message(&self, message_key: &MessageKey) -> Option<Vec<u8>> {
fn read_raw_message(&mut self, message_key: &MessageKey) -> Option<Vec<u8>> {
let storage_message_key = bp_messages::storage_keys::message_key(
B::BRIDGED_MESSAGES_PALLET_NAME,
&message_key.lane_id,
@@ -928,7 +942,35 @@ mod tests {
);
target::verify_messages_proof::<OnThisChainBridge>(proof, 10)
}),
Err(target::MessageProofError::HeaderChain(HeaderChainError::StorageRootMismatch)),
Err(target::MessageProofError::HeaderChain(HeaderChainError::StorageProof(
StorageProofError::StorageRootMismatch
))),
);
}

#[test]
fn message_proof_is_rejected_if_it_has_duplicate_trie_nodes() {
assert_eq!(
using_messages_proof(10, None, encode_all_messages, encode_lane_data, |mut proof| {
let node = proof.storage_proof.pop().unwrap();
proof.storage_proof.push(node.clone());
proof.storage_proof.push(node);
target::verify_messages_proof::<OnThisChainBridge>(proof, 10)
},),
Err(target::MessageProofError::HeaderChain(HeaderChainError::StorageProof(
StorageProofError::DuplicateNodesInProof
))),
);
}

#[test]
fn message_proof_is_rejected_if_it_has_unused_trie_nodes() {
assert_eq!(
using_messages_proof(10, None, encode_all_messages, encode_lane_data, |mut proof| {
proof.storage_proof.push(vec![42]);
target::verify_messages_proof::<OnThisChainBridge>(proof, 10)
},),
Err(target::MessageProofError::StorageProof(StorageProofError::UnusedNodesInTheProof)),
);
}

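The two new tests above exercise both rejection paths: one duplicates the last trie node of an otherwise valid proof and expects `DuplicateNodesInProof`, the other appends a bogus `vec![42]` node and expects `UnusedNodesInTheProof`. In terms of the toy `ProofChecker` sketch at the top of this page (illustrative names, not the real `bp_runtime` API), the same two failure modes look like this:

```rust
// Continues the toy ProofChecker sketch above (illustrative, not the real API).
fn rejects_tampered_proofs() {
    // duplicate entry => rejected when the checker is built
    let duplicated = vec![
        (b"lane".to_vec(), b"state".to_vec()),
        (b"lane".to_vec(), b"state".to_vec()),
    ];
    assert_eq!(
        ProofChecker::new(duplicated).err(),
        Some(StorageProofError::DuplicateNodesInProof),
    );

    // extra entry that nobody reads => rejected once all reads are done
    let mut checker = ProofChecker::new(vec![
        (b"lane".to_vec(), b"state".to_vec()),
        (b"unused".to_vec(), b"junk".to_vec()),
    ])
    .expect("no duplicates in this proof");
    assert!(checker.read_value(b"lane").is_some());
    assert_eq!(
        checker.ensure_no_unused_nodes(),
        Err(StorageProofError::UnusedNodesInTheProof),
    );
}
```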
20 changes: 8 additions & 12 deletions bin/runtime-common/src/messages_benchmarking.rs
@@ -22,7 +22,7 @@
use crate::{
messages::{
source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof,
AccountIdOf, BridgedChain, HashOf, HasherOf, MessageBridge, RawStorageProof, ThisChain,
AccountIdOf, BridgedChain, HashOf, HasherOf, MessageBridge, ThisChain,
},
messages_generation::{
encode_all_messages, encode_lane_data, grow_trie, prepare_messages_storage_proof,
@@ -31,13 +31,15 @@ use crate::{

use bp_messages::storage_keys;
use bp_polkadot_core::parachains::ParaHash;
use bp_runtime::{record_all_trie_keys, Chain, Parachain, StorageProofSize, UnderlyingChainOf};
use bp_runtime::{
record_all_trie_keys, Chain, Parachain, RawStorageProof, StorageProofSize, UnderlyingChainOf,
};
use codec::Encode;
use frame_support::weights::Weight;
use pallet_bridge_messages::benchmarking::{MessageDeliveryProofParams, MessageProofParams};
use sp_runtime::traits::{Header, Zero};
use sp_std::prelude::*;
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

/// Prepare proof of messages for the `receive_messages_proof` call.
///
@@ -209,15 +211,9 @@
root = grow_trie(root, &mut mdb, params.size);

// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<HasherOf<BridgedChain<B>>>>::new();
record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(
&mdb,
&root,
&mut proof_recorder,
)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let storage_proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();
let storage_proof = record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(&mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");

(root, storage_proof)
}
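Across the benchmarking and test helpers, the commit also changes `record_all_trie_keys` so that it returns the collected `RawStorageProof` itself (the `Vec<Vec<u8>>` alias this commit moves from `messages.rs` into `bp_runtime`) instead of filling a caller-supplied `Recorder` that then has to be drained by hand. A plausible shape for the updated helper, inferred from its call sites in this diff and from the recorder-draining code it replaces — the actual `bp_runtime` implementation may differ in details — is:

```rust
use sp_trie::{Recorder, Trie, TrieConfiguration, TrieDBBuilder, TrieError, TrieHash};

/// Raw storage proof type (just raw trie nodes), mirroring the alias
/// that this commit re-exports from `bp_runtime`.
pub type RawStorageProof = Vec<Vec<u8>>;

/// Read all trie keys under `root` while recording the visited nodes, and
/// return those nodes as a raw storage proof.
pub fn record_all_trie_keys<L: TrieConfiguration, DB>(
    db: &DB,
    root: &TrieHash<L>,
) -> Result<RawStorageProof, Box<TrieError<L>>>
where
    DB: hash_db::HashDBRef<L::Hash, trie_db::DBValue>,
{
    let mut recorder = Recorder::<L>::new();
    let trie = TrieDBBuilder::<L>::new(db, root).with_recorder(&mut recorder).build();
    // walk the whole trie so that every node ends up in the recorder
    for entry in trie.iter()? {
        let (_key, _value) = entry?;
    }

    Ok(recorder.drain().into_iter().map(|n| n.data.to_vec()).collect())
}
```

This keeps the benchmark helpers down to a single `record_all_trie_keys(&mdb, &root)` call, as the updated call sites above and below show.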
23 changes: 8 additions & 15 deletions bin/runtime-common/src/messages_generation.rs
@@ -18,16 +18,16 @@

#![cfg(any(feature = "runtime-benchmarks", test))]

use crate::messages::{BridgedChain, HashOf, HasherOf, MessageBridge, RawStorageProof};
use crate::messages::{BridgedChain, HashOf, HasherOf, MessageBridge};

use bp_messages::{
storage_keys, LaneId, MessageKey, MessageNonce, MessagePayload, OutboundLaneData,
};
use bp_runtime::{record_all_trie_keys, StorageProofSize};
use bp_runtime::{record_all_trie_keys, RawStorageProof, StorageProofSize};
use codec::Encode;
use sp_core::Hasher;
use sp_std::{ops::RangeInclusive, prelude::*};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

/// Simple and correct message data encode function.
pub(crate) fn encode_all_messages(_: MessageNonce, m: &MessagePayload) -> Option<Vec<u8>> {
@@ -97,15 +97,9 @@ where
root = grow_trie(root, &mut mdb, size);

// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<HasherOf<BridgedChain<B>>>>::new();
record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(
&mdb,
&root,
&mut proof_recorder,
)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let storage_proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();
let storage_proof = record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(&mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");

(root, storage_proof)
}
@@ -125,11 +119,10 @@ pub fn grow_trie<H: Hasher>(
let mut key_index = 0;
loop {
// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<H>>::new();
record_all_trie_keys::<LayoutV1<H>, _>(mdb, &root, &mut proof_recorder)
let storage_proof = record_all_trie_keys::<LayoutV1<H>, _>(mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let size: usize = proof_recorder.drain().into_iter().map(|n| n.data.len()).sum();
let size: usize = storage_proof.iter().map(|n| n.len()).sum();
if size > minimal_trie_size as _ {
return root
}
6 changes: 2 additions & 4 deletions bin/runtime-common/src/parachains_benchmarking.rs
@@ -29,7 +29,7 @@ use codec::Encode;
use frame_support::traits::Get;
use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber};
use sp_std::prelude::*;
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

/// Prepare proof of messages for the `receive_messages_proof` call.
///
@@ -72,11 +72,9 @@ where
state_root = grow_trie(state_root, &mut mdb, size);

// generate heads storage proof
let mut proof_recorder = Recorder::<LayoutV1<RelayBlockHasher>>::new();
record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &state_root, &mut proof_recorder)
let proof = record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &state_root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();

let (relay_block_number, relay_block_hash) =
insert_header_to_grandpa_pallet::<R, R::BridgesGrandpaPalletInstance>(state_root);
2 changes: 1 addition & 1 deletion modules/grandpa/src/lib.rs
@@ -1050,7 +1050,7 @@ mod tests {
assert_noop!(
Pallet::<TestRuntime>::parse_finalized_storage_proof(
Default::default(),
sp_trie::StorageProof::new(vec![]),
vec![],
|_| (),
),
bp_header_chain::HeaderChainError::UnknownHeader,
34 changes: 23 additions & 11 deletions modules/parachains/src/lib.rs
@@ -330,10 +330,10 @@ pub mod pallet {

pallet_bridge_grandpa::Pallet::<T, T::BridgesGrandpaPalletInstance>::parse_finalized_storage_proof(
relay_block_hash,
sp_trie::StorageProof::new(parachain_heads_proof.0),
move |storage| {
parachain_heads_proof.0,
move |mut storage| {
for (parachain, parachain_head_hash) in parachains {
let parachain_head = match Pallet::<T, I>::read_parachain_head(&storage, parachain) {
let parachain_head = match Pallet::<T, I>::read_parachain_head(&mut storage, parachain) {
Ok(Some(parachain_head)) => parachain_head,
Ok(None) => {
log::trace!(
@@ -381,7 +381,10 @@
}

// convert from parachain head into stored parachain head data
let parachain_head_data = match T::ParaStoredHeaderDataBuilder::try_build(parachain, &parachain_head) {
let parachain_head_data = match T::ParaStoredHeaderDataBuilder::try_build(
parachain,
&parachain_head,
) {
Some(parachain_head_data) => parachain_head_data,
None => {
log::trace!(
@@ -418,9 +421,20 @@
.saturating_sub(WeightInfoOf::<T, I>::parachain_head_pruning_weight(T::DbWeight::get()));
}
}

// even though we may have accepted some parachain heads, we can't allow relayers to submit
// proof with unused trie nodes
// => treat this as an error
//
// (we can throw an error here, because all our calls are now transactional)
storage.ensure_no_unused_nodes()
},
)
.map_err(|_| Error::<T, I>::InvalidStorageProof)?;
.and_then(|r| r.map_err(bp_header_chain::HeaderChainError::StorageProof))
.map_err(|e| {
log::trace!(target: LOG_TARGET, "Parachain heads storage proof is invalid: {:?}", e);
Error::<T, I>::InvalidStorageProof
})?;

Ok(PostDispatchInfo { actual_weight: Some(actual_weight), pays_fee: Pays::Yes })
}
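Because the closure passed to `parse_finalized_storage_proof` now returns a `Result` itself (the final `ensure_no_unused_nodes()` call), the pallet ends up with a nested result that is flattened with `.and_then(|r| r.map_err(...))` before being mapped onto `Error::<T, I>::InvalidStorageProof`. A minimal, self-contained illustration of that flattening pattern, with toy types rather than the pallet's real ones:

```rust
/// Toy stand-in for the outer error layer used above.
#[derive(Debug)]
enum HeaderChainError {
    StorageProof(&'static str),
}

/// Pretend "verify the proof, then run the caller's closure over it":
/// the outer `Result` is proof verification, the inner one is whatever the
/// closure returns (here: the unused-nodes check).
fn parse_finalized_storage_proof<R>(f: impl FnOnce() -> R) -> Result<R, HeaderChainError> {
    Ok(f()) // assume the proof itself verified fine
}

fn main() {
    let outcome: Result<(), HeaderChainError> =
        parse_finalized_storage_proof(|| Err::<(), _>("unused trie nodes in proof"))
            // lift the closure's own error into the outer error type...
            .and_then(|r| r.map_err(HeaderChainError::StorageProof));

    // ...so a proof with unused nodes surfaces as a single error, ready to be
    // mapped onto something like Error::<T, I>::InvalidStorageProof.
    assert!(matches!(outcome, Err(HeaderChainError::StorageProof(_))));
}
```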
@@ -488,7 +502,7 @@ pub mod pallet {

/// Read parachain head from storage proof.
fn read_parachain_head(
storage: &bp_runtime::StorageProofChecker<RelayBlockHasher>,
storage: &mut bp_runtime::StorageProofChecker<RelayBlockHasher>,
parachain: ParaId,
) -> Result<Option<ParaHead>, StorageProofError> {
let parachain_head_key =
@@ -705,7 +719,7 @@ mod tests {
use frame_system::{EventRecord, Pallet as System, Phase};
use sp_core::Hasher;
use sp_runtime::{traits::Header as HeaderT, DispatchError};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

type BridgesGrandpaPalletInstance = pallet_bridge_grandpa::Instance1;
type WeightInfo = <TestRuntime as Config>::WeightInfo;
@@ -759,11 +773,9 @@ mod tests {
}

// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<RelayBlockHasher>>::new();
record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &root, &mut proof_recorder)
let storage_proof = record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let storage_proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();

(root, ParaHeadsProof(storage_proof), parachains)
}
@@ -1447,7 +1459,7 @@

#[test]
fn ignores_parachain_head_if_it_is_missing_from_storage_proof() {
let (state_root, proof, _) = prepare_parachain_heads_proof(vec![(1, head_data(1, 0))]);
let (state_root, proof, _) = prepare_parachain_heads_proof(vec![]);
let parachains = vec![(ParaId(2), Default::default())];
run_test(|| {
initialize(state_root);
2 changes: 0 additions & 2 deletions primitives/header-chain/Cargo.toml
@@ -23,7 +23,6 @@ sp-core = { git = "https://github.com/paritytech/substrate", branch = "master", default-features = false }
sp-finality-grandpa = { git = "https://github.com/paritytech/substrate", branch = "master", default-features = false }
sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "master", default-features = false }
sp-std = { git = "https://github.com/paritytech/substrate", branch = "master", default-features = false }
sp-trie = { git = "https://github.com/paritytech/substrate", branch = "master", default-features = false }

[dev-dependencies]
bp-test-utils = { path = "../test-utils" }
@@ -43,5 +42,4 @@ std = [
"sp-finality-grandpa/std",
"sp-runtime/std",
"sp-std/std",
"sp-trie/std",
]