refactor relaychain hash storage structure on light client to substitute the in-memory hash vector so that no relaychain hash is ever lost on misbehavior check

kkast committed Mar 11, 2024
1 parent d775eb6 commit e50f434
Showing 21 changed files with 314 additions and 321 deletions.
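The core of the refactor is a change of shape in the proof payload submitted to the light client: instead of a map from relay chain header hashes to parachain header proofs, each update now carries exactly one (hash, proofs) pair (see algorithms/grandpa/primitives/src/lib.rs below). A minimal before/after sketch, with simplified stand-ins for the crate's Hash and ParachainHeaderProofs types and the other fields elided:

use std::collections::BTreeMap;

// Stand-ins for the crate's own types, for illustration only.
type Hash = [u8; 32];
struct ParachainHeaderProofs; // state proof + timestamp extrinsic proof

// Before: one update could carry proofs for many finalized relay chain blocks.
struct UpdateBefore {
    parachain_headers: BTreeMap<Hash, ParachainHeaderProofs>,
    latest_para_height: u32,
    // ... finality proof and other fields elided
}

// After: exactly one (relay chain hash, proofs) pair per update.
struct UpdateAfter {
    parachain_header: (Hash, ParachainHeaderProofs),
    latest_para_height: u32,
    // ... finality proof and other fields elided
}
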
2 changes: 1 addition & 1 deletion algorithms/beefy/verifier/Cargo.toml
@@ -43,7 +43,7 @@ beefy-prover = { path = "../prover" }
hex = "0.4.3"
futures = "0.3.21"
sc-consensus-beefy = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" }
hyperspace-core = { path = "../../../hyperspace/core", features = ["testing", "build-metadata-from-ws"] }
hyperspace-core = { path = "../../../hyperspace/core", features = ["testing"] }


[features]
6 changes: 1 addition & 5 deletions algorithms/grandpa/primitives/src/lib.rs
@@ -94,7 +94,7 @@ pub struct ParachainHeadersWithFinalityProof<H: codec::Codec> {
/// Contains a map of relay chain header hashes to parachain headers
/// finalized at the relay chain height. We check for this parachain header finalization
/// via state proofs. Also contains extrinsic proof for timestamp.
pub parachain_headers: BTreeMap<Hash, ParachainHeaderProofs>,
pub parachain_header: (Hash, ParachainHeaderProofs),
/// The latest finalized height on the parachain.
pub latest_para_height: u32,
}
@@ -106,10 +106,6 @@ pub trait HostFunctions: light_client_common::HostFunctions + 'static {

/// Verify an ed25519 signature
fn ed25519_verify(sig: &ed25519::Signature, msg: &[u8], pub_key: &ed25519::Public) -> bool;
/// Stores the given list of RelayChain header hashes in the light client's storage.
fn insert_relay_header_hashes(headers: &[<Self::Header as Header>::Hash]);
/// Checks if a RelayChain header hash exists in the light client's storage.
fn contains_relay_header_hash(hash: <Self::Header as Header>::Hash) -> bool;
}

/// This returns the storage key for a parachain header on the relay chain.
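With the two storage functions dropped from the trait above, an implementation only has to provide signature verification, which is exactly what the prover's HostFunctionsProvider keeps in the next file. A minimal standalone sketch of such an impl, mirroring the pubkey.verify(&msg, sig) one-liner from host_functions.rs (the light_client_common supertrait from this repo is omitted so the snippet stands alone):

use sp_core::ed25519::{Public, Signature};
use sp_runtime::app_crypto::RuntimePublic;

// Supertrait simplified: the real trait also extends this repo's
// light_client_common::HostFunctions.
pub trait HostFunctions: 'static {
    /// Verify an ed25519 signature
    fn ed25519_verify(sig: &Signature, msg: &[u8], pub_key: &Public) -> bool;
}

pub struct HostFunctionsProvider;

impl HostFunctions for HostFunctionsProvider {
    fn ed25519_verify(sig: &Signature, msg: &[u8], pubkey: &Public) -> bool {
        // Same body as in algorithms/grandpa/prover/src/host_functions.rs.
        pubkey.verify(&msg, sig)
    }
}
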
8 changes: 0 additions & 8 deletions algorithms/grandpa/prover/src/host_functions.rs
@@ -35,12 +35,4 @@ impl HostFunctions for HostFunctionsProvider {
fn ed25519_verify(sig: &Signature, msg: &[u8], pubkey: &Public) -> bool {
pubkey.verify(&msg, sig)
}

fn insert_relay_header_hashes(_headers: &[<Self::Header as Header>::Hash]) {
unimplemented!()
}

fn contains_relay_header_hash(_hash: <Self::Header as Header>::Hash) -> bool {
unimplemented!()
}
}
144 changes: 59 additions & 85 deletions algorithms/grandpa/prover/src/lib.rs
@@ -339,101 +339,75 @@ where
let change_set = self
.relay_client
.rpc()
.query_storage(keys.clone(), start, Some(latest_finalized_hash))
.await?;

let mut change_set_join_set: JoinSet<Result<Option<_>, anyhow::Error>> = JoinSet::new();
let mut parachain_headers_with_proof = BTreeMap::<H256, ParachainHeaderProofs>::default();
log::debug!(target:"hyperspace", "Got {} authority set changes", change_set.len());

fn clone_storage_change_sets<T: light_client_common::config::Config + Send + Sync>(
changes: &[StorageChangeSet<T::Hash>],
) -> Vec<StorageChangeSet<T::Hash>> {
changes
.iter()
.map(|change| StorageChangeSet {
block: change.block.clone(),
changes: change.changes.clone(),
})
.collect()
}
.query_storage(keys.clone(), latest_finalized_hash, Some(latest_finalized_hash))
.await?
.pop()
.unwrap();

log::debug!(target:"hyperspace", "Got {} authority set changes", change_set.changes.len());

let change = StorageChangeSet {
block: change_set.block.clone(),
changes: change_set.changes.clone(),
};
let latest_para_height = Arc::new(AtomicU32::new(0u32));
for changes in change_set.chunks(PROCESS_CHANGES_SET_BATCH_SIZE) {
for change in clone_storage_change_sets::<T>(changes) {
let header_numbers = header_numbers.clone();
let keys = vec![para_storage_key.clone()];
let client = self.clone();
let to = self.rpc_call_delay.as_millis();
let duration1 = Duration::from_millis(rand::thread_rng().gen_range(1..to) as u64);
let latest_para_height = latest_para_height.clone();
change_set_join_set.spawn(async move {
sleep(duration1).await;
let header = client
.relay_client
.rpc()
.header(Some(change.block))
.await?
.ok_or_else(|| anyhow!("block not found {:?}", change.block))?;

let parachain_header_bytes = {
let key = T::Storage::paras_heads(client.para_id);
let data = client
.relay_client
.storage()
.at(header.hash())
.fetch(&key)
.await?
.expect("Header exists in its own changeset; qed");
<T::Storage as RuntimeStorage>::HeadData::from_inner(data)
};

let para_header: T::Header =
Decode::decode(&mut parachain_header_bytes.as_ref())?;
let para_block_number = para_header.number();
// skip genesis header or any unknown headers
if para_block_number == Zero::zero() ||
!header_numbers.contains(&para_block_number)
{
return Ok(None)
}

let state_proof = client
.relay_client
.rpc()
.read_proof(keys.iter().map(AsRef::as_ref), Some(header.hash()))
.await?
.proof
.into_iter()
.map(|p| p.0)
.collect();

let TimeStampExtWithProof { ext: extrinsic, proof: extrinsic_proof } =
fetch_timestamp_extrinsic_with_proof(
&client.para_client,
Some(para_header.hash()),
)
.await
.map_err(|err| anyhow!("Error fetching timestamp with proof: {err:?}"))?;
let proofs = ParachainHeaderProofs { state_proof, extrinsic, extrinsic_proof };
latest_para_height.fetch_max(u32::from(para_block_number), Ordering::SeqCst);
Ok(Some((H256::from(header.hash()), proofs)))
});
}

while let Some(res) = change_set_join_set.join_next().await {
if let Some((hash, proofs)) = res?? {
parachain_headers_with_proof.insert(hash, proofs);
}
}
let header_numbers = header_numbers.clone();
let keys = vec![para_storage_key.clone()];
let client = self.clone();
let to = self.rpc_call_delay.as_millis();
let duration1 = Duration::from_millis(rand::thread_rng().gen_range(1..to) as u64);
let latest_para_height = latest_para_height.clone();
let header = client
.relay_client
.rpc()
.header(Some(change.block))
.await?
.ok_or_else(|| anyhow!("block not found {:?}", change.block))?;

let parachain_header_bytes = {
let key = T::Storage::paras_heads(client.para_id);
let data = client
.relay_client
.storage()
.at(header.hash())
.fetch(&key)
.await?
.expect("Header exists in its own changeset; qed");
<T::Storage as RuntimeStorage>::HeadData::from_inner(data)
};

let para_header: T::Header = Decode::decode(&mut parachain_header_bytes.as_ref())?;
let para_block_number = para_header.number();
// skip genesis header or any unknown headers
if para_block_number == Zero::zero() || !header_numbers.contains(&para_block_number) {
return Err(anyhow!("genesis header or unknown header"))
}

let state_proof = client
.relay_client
.rpc()
.read_proof(keys.iter().map(AsRef::as_ref), Some(header.hash()))
.await?
.proof
.into_iter()
.map(|p| p.0)
.collect();

let TimeStampExtWithProof { ext: extrinsic, proof: extrinsic_proof } =
fetch_timestamp_extrinsic_with_proof(&client.para_client, Some(para_header.hash()))
.await
.map_err(|err| anyhow!("Error fetching timestamp with proof: {err:?}"))?;
let proofs = ParachainHeaderProofs { state_proof, extrinsic, extrinsic_proof };
latest_para_height.fetch_max(u32::from(para_block_number), Ordering::SeqCst);

unknown_headers.sort_by_key(|header| header.number());
// overwrite unknown headers
finality_proof.unknown_headers = unknown_headers;

Ok(ParachainHeadersWithFinalityProof {
finality_proof,
parachain_headers: parachain_headers_with_proof,
parachain_header: (H256::from(header.hash()), proofs),
latest_para_height: latest_para_height.load(Ordering::SeqCst),
})
}
2 changes: 1 addition & 1 deletion algorithms/grandpa/verifier/Cargo.toml
@@ -40,7 +40,7 @@ grandpa-prover = { path = "../prover" }
sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" }
jsonrpsee-ws-client = "0.16.2"
jsonrpsee-core = "0.16.2"
hyperspace-core = { path = "../../../hyperspace/core", features = ["testing", "build-metadata-from-ws"] }
hyperspace-core = { path = "../../../hyperspace/core", features = ["testing", ] }
light-client-common = { path = "../../../light-clients/common", features = ["std"] }

[features]
79 changes: 38 additions & 41 deletions algorithms/grandpa/verifier/src/lib.rs
@@ -54,7 +54,7 @@ where
Host: HostFunctions,
Host::BlakeTwo256: Hasher<Out = H256>,
{
let ParachainHeadersWithFinalityProof { finality_proof, parachain_headers, latest_para_height } =
let ParachainHeadersWithFinalityProof { finality_proof, parachain_header, latest_para_height } =
proof;

// 1. First validate unknown headers.
@@ -102,51 +102,48 @@ where
justification.verify::<Host>(client_state.current_set_id, &client_state.current_authorities)?;

// 3. verify state proofs of parachain headers in finalized relay chain headers.
let mut para_heights = vec![];
for (hash, proofs) in parachain_headers {
if finalized.binary_search(&hash).is_err() {
// seems relay hash isn't in the finalized chain.
continue
}
let relay_chain_header =
headers.header(&hash).expect("Headers have been checked by AncestryChain; qed");

let ParachainHeaderProofs { extrinsic_proof, extrinsic, state_proof } = proofs;
let proof = StorageProof::new(state_proof);
let key = parachain_header_storage_key(client_state.para_id);
// verify patricia-merkle state proofs
let header = state_machine::read_proof_check::<Host::BlakeTwo256, _>(
relay_chain_header.state_root(),
proof,
&[key.as_ref()],
)
.map_err(|err| anyhow!("error verifying parachain header state proof: {err}"))?
.remove(key.as_ref())
.flatten()
.ok_or_else(|| anyhow!("Invalid proof, parachain header not found"))?;
let parachain_header = H::decode(&mut &header[..])?;
para_heights.push(parachain_header.number().clone().into());
// Timestamp extrinsic should be the first inherent and hence the first extrinsic
// https://github.com/paritytech/substrate/blob/d602397a0bbb24b5d627795b797259a44a5e29e9/primitives/trie/src/lib.rs#L99-L101
let key = codec::Compact(0u64).encode();
// verify extrinsic proof for timestamp extrinsic
sp_trie::verify_trie_proof::<LayoutV0<Host::BlakeTwo256>, _, _, _>(
parachain_header.extrinsics_root(),
&extrinsic_proof,
&vec![(key, Some(&extrinsic[..]))],
)
.map_err(|_| anyhow!("Invalid extrinsic proof"))?;
}

let (hash, proofs) = parachain_header;
finalized
.binary_search(&hash)
.map_err(|err| anyhow!("error searching for relaychain hash: {err}"))?;
let relay_chain_header =
headers.header(&hash).expect("Headers have been checked by AncestryChain; qed");

let ParachainHeaderProofs { extrinsic_proof, extrinsic, state_proof } = proofs;
let proof = StorageProof::new(state_proof);
let key = parachain_header_storage_key(client_state.para_id);
// verify patricia-merkle state proofs
let header = state_machine::read_proof_check::<Host::BlakeTwo256, _>(
relay_chain_header.state_root(),
proof,
&[key.as_ref()],
)
.map_err(|err| anyhow!("error verifying parachain header state proof: {err}"))?
.remove(key.as_ref())
.flatten()
.ok_or_else(|| anyhow!("Invalid proof, parachain header not found"))?;
let parachain_header = H::decode(&mut &header[..])?;
// Timestamp extrinsic should be the first inherent and hence the first extrinsic
// https://github.com/paritytech/substrate/blob/d602397a0bbb24b5d627795b797259a44a5e29e9/primitives/trie/src/lib.rs#L99-L101
let key = codec::Compact(0u64).encode();
// verify extrinsic proof for timestamp extrinsic
sp_trie::verify_trie_proof::<LayoutV0<Host::BlakeTwo256>, _, _, _>(
parachain_header.extrinsics_root(),
&extrinsic_proof,
&vec![(key, Some(&extrinsic[..]))],
)
.map_err(|_| anyhow!("Invalid extrinsic proof"))?;

// 4. set new client state, optionally rotating authorities
client_state.latest_relay_hash = target.hash();
client_state.latest_relay_height = (*target.number()).into();
if let Some(max_height) = para_heights.into_iter().max() {
if max_height != latest_para_height {
Err(anyhow!("Latest parachain header height doesn't match the one in the proof"))?;
}
client_state.latest_para_height = max_height;

if *parachain_header.number() != latest_para_height {
Err(anyhow!("Latest parachain header height doesn't match the one in the proof"))?;
}
client_state.latest_para_height = *parachain_header.number();

if let Some(scheduled_change) = find_scheduled_change::<H>(&target) {
client_state.current_set_id += 1;
client_state.current_authorities = scheduled_change.next_authorities;
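Behavioral note on the verifier change above: the old loop silently continued past any proof whose relay chain hash was not in the finalized chain, while the new code turns that miss into a hard error. The binary_search also relies on the finalized hash list being sorted. A tiny self-contained illustration of the lookup pattern, using plain integers as stand-ins for hashes:

use anyhow::anyhow;

fn main() -> anyhow::Result<()> {
    // Stand-in for the sorted list of finalized relay chain hashes.
    let finalized: Vec<u32> = vec![1, 3, 5, 8];
    let hash = 4u32; // a hash that was never finalized

    // On a miss, binary_search returns Err(insertion_index); the verifier now
    // maps that into an error instead of skipping the header as before.
    finalized
        .binary_search(&hash)
        .map_err(|err| anyhow!("error searching for relaychain hash: {err}"))?;

    Ok(())
}
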
1 change: 1 addition & 0 deletions contracts/pallet-ibc/src/client.rs
@@ -275,6 +275,7 @@ where
let cs_state = ics10_grandpa::consensus_state::ConsensusState {
timestamp,
root: header.state_root().as_ref().to_vec().into(),
relaychain_hashes: vec![],
};
let cs = AnyConsensusState::Grandpa(cs_state);

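The new relaychain_hashes field (initialized empty here) is presumably where finalized relay chain hashes are tracked after this commit, replacing the bounded runtime buffer removed from pallet-ibc's light_clients.rs below. How the field is populated and queried is not part of this diff, so the membership-check helper in this sketch is an assumption built only on the field name:

// Sketch only: the field name comes from this diff; the simplified struct
// and the helper are assumptions for illustration, not the crate's real API.
pub struct ConsensusState {
    pub relaychain_hashes: Vec<[u8; 32]>,
}

/// Misbehaviour-style check: because the hashes live on the consensus state
/// itself, none of them are evicted by a bounded in-runtime buffer.
pub fn contains_relay_header_hash(cs: &ConsensusState, hash: &[u8; 32]) -> bool {
    cs.relaychain_hashes.iter().any(|h| h == hash)
}
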
35 changes: 1 addition & 34 deletions contracts/pallet-ibc/src/light_clients.rs
@@ -48,7 +48,7 @@ use prost::Message;
use sp_core::{crypto::ByteArray, ed25519, H256};
use sp_runtime::{
app_crypto::RuntimePublic,
traits::{BlakeTwo256, ConstU32, Header},
traits::{BlakeTwo256, ConstU32},
BoundedBTreeSet, BoundedVec,
};
use tendermint::{
@@ -161,39 +161,6 @@ impl grandpa_client_primitives::HostFunctions for HostFunctionsManager {
fn ed25519_verify(sig: &ed25519::Signature, msg: &[u8], pub_key: &ed25519::Public) -> bool {
pub_key.verify(&msg, sig)
}

fn insert_relay_header_hashes(new_hashes: &[<Self::Header as Header>::Hash]) {
if new_hashes.is_empty() {
return
}

GrandpaHeaderHashesSetStorage::mutate(|hashes_set| {
GrandpaHeaderHashesStorage::mutate(|hashes| {
for hash in new_hashes {
match hashes.try_push(*hash) {
Ok(_) => {},
Err(_) => {
let old_hash = hashes.remove(0);
hashes_set.remove(&old_hash);
hashes.try_push(*hash).expect(
"we just removed an element, so there is space for this one; qed",
);
},
}
match hashes_set.try_insert(*hash) {
Ok(_) => {},
Err(_) => {
log::warn!("duplicated value in GrandpaHeaderHashesStorage or the storage is corrupted");
},
}
}
});
});
}

fn contains_relay_header_hash(hash: <Self::Header as Header>::Hash) -> bool {
GrandpaHeaderHashesSetStorage::get().contains(&hash)
}
}

impl light_client_common::HostFunctions for HostFunctionsManager {
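The removed insert_relay_header_hashes body above is where relay chain hashes could previously be "lost": once the bounded vector filled up, the oldest hash was evicted (hashes.remove(0) plus removal from the set), so a later misbehavior check against that hash would come up empty. A small standalone illustration of the eviction effect, with an assumed capacity of 3:

fn main() {
    const CAPACITY: usize = 3; // assumed small bound, for illustration only
    let mut hashes: Vec<u32> = Vec::new();

    for hash in [10, 20, 30, 40] {
        if hashes.len() == CAPACITY {
            // Mirrors the removed runtime code: drop the oldest hash
            // to make room for the new one.
            let evicted = hashes.remove(0);
            println!("evicted relay hash {evicted}");
        }
        hashes.push(hash);
    }

    // Hash 10 has been evicted, so a membership check against it now fails --
    // exactly the loss this commit avoids by moving hashes onto the consensus state.
    assert!(!hashes.contains(&10));
}
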