From f0f5308bfb8fc22d910f038f35d1403e861c1525 Mon Sep 17 00:00:00 2001
From: Anatoly Yakovenko
Date: Sun, 4 Nov 2018 11:44:00 -0800
Subject: [PATCH] docs

---
 src/crds_gossip.rs      | 59 ++++++++++++++++++++++-------------------
 src/crds_gossip_pull.rs | 10 ++++---
 src/crds_gossip_push.rs |  4 +--
 3 files changed, 39 insertions(+), 34 deletions(-)

diff --git a/src/crds_gossip.rs b/src/crds_gossip.rs
index 6434d4e348bb32..6681af7de49c93 100644
--- a/src/crds_gossip.rs
+++ b/src/crds_gossip.rs
@@ -275,35 +275,38 @@ mod test {
             }
             node.lock().unwrap().new_push_messages(now)
         }).collect();
-        let transfered: Vec<_> = requests.par_iter().map(|(from, peers, msgs)| {
-            let mut bytes: usize = 0;
-            let mut delivered: usize = 0;
-            let mut num_msgs: usize = 0;
-            let mut prunes: usize = 0;
-            for to in peers {
-                bytes += serialized_size(msgs).unwrap() as usize;
-                num_msgs += 1;
-                for m in msgs {
-                    let origin = m.label().pubkey();
-                    let rsp = network
-                        .get(&to)
-                        .map(|node| node.lock().unwrap().process_push_message(m.clone(), now))
-                        .unwrap();
-                    if rsp == Err(CrdsGossipError::PushMessagePrune) {
-                        prunes += 1;
-                        bytes += serialized_size(&to).unwrap() as usize;
-                        bytes += serialized_size(&origin).unwrap() as usize;
-                        network
-                            .get(&from)
-                            .map(|node| node.lock().unwrap().process_prune_msg(*to, origin))
-                            .unwrap();
+        let transfered: Vec<_> = requests
+            .par_iter()
+            .map(|(from, peers, msgs)| {
+                let mut bytes: usize = 0;
+                let mut delivered: usize = 0;
+                let mut num_msgs: usize = 0;
+                let mut prunes: usize = 0;
+                for to in peers {
+                    bytes += serialized_size(msgs).unwrap() as usize;
+                    num_msgs += 1;
+                    for m in msgs {
+                        let origin = m.label().pubkey();
+                        let rsp = network
+                            .get(&to)
+                            .map(|node| {
+                                node.lock().unwrap().process_push_message(m.clone(), now)
+                            }).unwrap();
+                        if rsp == Err(CrdsGossipError::PushMessagePrune) {
+                            prunes += 1;
+                            bytes += serialized_size(&to).unwrap() as usize;
+                            bytes += serialized_size(&origin).unwrap() as usize;
+                            network
+                                .get(&from)
+                                .map(|node| node.lock().unwrap().process_prune_msg(*to, origin))
+                                .unwrap();
+                        }
+                        delivered += rsp.is_ok() as usize;
                     }
-                    delivered += rsp.is_ok() as usize;
                 }
-            }
-            (bytes, delivered, num_msgs, prunes)
-        }).collect();
-        for (b,d,m,p) in transfered {
+                (bytes, delivered, num_msgs, prunes)
+            }).collect();
+        for (b, d, m, p) in transfered {
             bytes += b;
             delivered += d;
             num_msgs += m;
@@ -381,7 +384,7 @@ mod test {
             });
             (bytes, msgs, overhead)
         }).collect();
-        for (b,m, o) in transfered {
+        for (b, m, o) in transfered {
             bytes += b;
             msgs += m;
             overhead += o;
diff --git a/src/crds_gossip_pull.rs b/src/crds_gossip_pull.rs
index deb67e08e87727..8f17a19e5f57c3 100644
--- a/src/crds_gossip_pull.rs
+++ b/src/crds_gossip_pull.rs
@@ -1,11 +1,13 @@
 //! Crds Gossip Pull overlay
-//! This module is used to synchronize the Crds with nodes in the network.
-//! The basic strategy is as follows
+//! This module implements the anti-entropy protocol for the network.
+//!
+//! The basic strategy is as follows:
 //! 1. Construct a bloom filter of the local data set
 //! 2. Randomly ask a node on the network for data that is is not contained in the bloom filter.
 //!
-//! Bloom filters have a false positive rate. Because each filter is constructed with random hash
-//! functions each subsequent request will have a different distribution of false positivies.
+//! Bloom filters have a false positive rate. Each request uses a different bloom filter
+//! with random hash functions, so each subsequent request will have a different distribution
+//! of false positives.

 use bincode::serialized_size;
 use bloom::Bloom;
diff --git a/src/crds_gossip_push.rs b/src/crds_gossip_push.rs
index e2fa968b5a1027..8dbb0feaf0e44f 100644
--- a/src/crds_gossip_push.rs
+++ b/src/crds_gossip_push.rs
@@ -3,9 +3,9 @@
 //! Eager push strategy is based on Plumtree
 //! http://asc.di.fct.unl.pt/~jleitao/pdf/srds07-leitao.pdf
 //!
-//! Main differences are
+//! Main differences are:
 //! 1. There is no `max hop`. Messages are signed with a local wallclock. If they are outside of
-//! the local nodes wallclock window they are droped.
+//! the local node's wallclock window they are dropped silently.
 //! 2. The prune set is stored in a Bloom filter.

 use bincode::serialized_size;
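
To make the pull description in src/crds_gossip_pull.rs concrete, here is a minimal sketch of one anti-entropy round, under stated assumptions: `SimpleBloom`, `pull_request`, and `pull_response` are hypothetical names, the filter below is a toy stand-in for the `bloom` crate the module actually uses, and the sizes and seeds are made up for the example.

// A minimal sketch, not the module's real API.
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

/// Toy Bloom filter: a fixed bit vector probed by a few seeded hash functions.
struct SimpleBloom {
    bits: Vec<bool>,
    seeds: Vec<u64>,
}

impl SimpleBloom {
    fn new(num_bits: usize, seeds: Vec<u64>) -> Self {
        SimpleBloom { bits: vec![false; num_bits], seeds }
    }
    fn slot<T: Hash>(&self, seed: u64, item: &T) -> usize {
        let mut hasher = DefaultHasher::new();
        seed.hash(&mut hasher);
        item.hash(&mut hasher);
        hasher.finish() as usize % self.bits.len()
    }
    fn add<T: Hash>(&mut self, item: &T) {
        let slots: Vec<usize> = self.seeds.iter().map(|&s| self.slot(s, item)).collect();
        for i in slots {
            self.bits[i] = true;
        }
    }
    fn contains<T: Hash>(&self, item: &T) -> bool {
        self.seeds.iter().all(|&s| self.bits[self.slot(s, item)])
    }
}

/// Requester: advertise everything already held locally as a bloom filter.
fn pull_request(local: &HashMap<String, String>, seeds: Vec<u64>) -> SimpleBloom {
    let mut filter = SimpleBloom::new(512, seeds);
    for key in local.keys() {
        filter.add(key);
    }
    filter
}

/// Responder: reply only with values the requester (probably) does not hold.
/// False positives hide a few values this round; the next request uses different
/// seeds, so it is unlikely to miss the same values again.
fn pull_response(remote: &HashMap<String, String>, filter: &SimpleBloom) -> Vec<(String, String)> {
    remote
        .iter()
        .filter(|(k, _)| !filter.contains(*k))
        .map(|(k, v)| (k.clone(), v.clone()))
        .collect()
}

fn main() {
    let local: HashMap<String, String> =
        vec![("a".into(), "1".into())].into_iter().collect();
    let remote: HashMap<String, String> =
        vec![("a".into(), "1".into()), ("b".into(), "2".into())].into_iter().collect();
    // The seeds would normally come from an RNG so every request differs.
    let filter = pull_request(&local, vec![1, 2, 3]);
    let missing = pull_response(&remote, &filter);
    println!("pulled {} new values", missing.len()); // expect 1 ("b"), barring a false positive
}

The per-request choice of hash functions is what makes repeated rounds converge: any value hidden by a false positive in one round is very unlikely to be hidden again by the next filter.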
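
A matching sketch for the two push-side rules listed in src/crds_gossip_push.rs, under the same caveats: `accept`, `PushSender`, and the 30-second window are hypothetical, and a HashSet of (peer, origin) pairs stands in for the Bloom filter the module keeps its prune set in.

// A minimal sketch, not the module's real types or error values.
use std::collections::HashSet;

const WALLCLOCK_WINDOW_MS: u64 = 30_000; // made-up window size for the example

struct PushMsg {
    origin: String, // pubkey of the node that created the value
    wallclock: u64, // sender-local timestamp carried instead of a hop count
}

/// Receiver side: no `max hop` check, only the wallclock window. Anything outside
/// the window is dropped silently; no error is sent back.
fn accept(msg: &PushMsg, now: u64) -> bool {
    msg.wallclock + WALLCLOCK_WINDOW_MS >= now && msg.wallclock <= now + WALLCLOCK_WINDOW_MS
}

/// Sender side: once a peer prunes an origin, the (peer, origin) pair is recorded and
/// that origin's messages are no longer pushed to that peer.
struct PushSender {
    prunes: HashSet<(String, String)>, // (peer, origin); a Bloom filter in the real code
}

impl PushSender {
    fn record_prune(&mut self, peer: &str, origin: &str) {
        self.prunes.insert((peer.to_string(), origin.to_string()));
    }
    fn should_push(&self, peer: &str, msg: &PushMsg) -> bool {
        !self.prunes.contains(&(peer.to_string(), msg.origin.clone()))
    }
}

fn main() {
    let now = 1_000_000;
    let fresh = PushMsg { origin: "origin_a".into(), wallclock: now - 5_000 };
    let stale = PushMsg { origin: "origin_a".into(), wallclock: now - 120_000 };
    assert!(accept(&fresh, now));
    assert!(!accept(&stale, now)); // outside the window: dropped silently

    let mut sender = PushSender { prunes: HashSet::new() };
    assert!(sender.should_push("peer_1", &fresh));
    sender.record_prune("peer_1", "origin_a"); // peer_1 asked us to stop pushing origin_a
    assert!(!sender.should_push("peer_1", &fresh));
}

Storing the prune set in a Bloom filter (rather than the exact set used here) bounds its memory at the cost of occasionally suppressing a push that was not actually pruned.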