Further storage iterator refactoring (#13445)
* Remove `Backend::apply_to_key_values_while`

* Add `IterArgs::start_at_exclusive`

* Use `start_at_exclusive` in functions which used `Backend::apply_to_key_values_while`

* Remove `Backend::apply_to_keys_while`

* Remove `for_keys_with_prefix`, `for_key_values_with_prefix` and `for_child_keys_with_prefix`

* Remove unnecessary `to_vec` calls

* Fix unused method warning in no_std

* Remove unnecessary import

* Also check proof sizes in the test

* Iterate over both keys and values in `prove_range_read_with_size` and add a test
koute authored Feb 27, 2023
1 parent 4ad96cf commit 7d76719
Showing 6 changed files with 226 additions and 370 deletions.
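The core of the change is the new `IterArgs::start_at_exclusive` flag: instead of every client-side iterator remembering its starting key and filtering it back out on the first call to `next()` (the `skip_if_first` fields removed below), the state-machine backend itself can be asked to resume strictly after `start_at`. The sketch below shows what that enables — resuming a key scan from a cursor without yielding the cursor key again. Only the `IterArgs` fields and the `Backend::keys` call are taken from this commit; the free function, its name and its error handling are illustrative assumptions.

```rust
use sp_state_machine::{Backend, IterArgs};

/// Collect the keys under `prefix` that come strictly after `last_seen_key`.
/// `backend` is anything implementing `Backend<H>`; error handling is deliberately simplified.
fn keys_after<H, B>(backend: &B, prefix: &[u8], last_seen_key: &[u8]) -> Vec<Vec<u8>>
where
	H: hash_db::Hasher,
	B: Backend<H>,
{
	let mut args = IterArgs::default();
	args.prefix = Some(prefix);
	args.start_at = Some(last_seen_key);
	// Without this flag `start_at` is inclusive and `last_seen_key` would be yielded again.
	args.start_at_exclusive = true;

	match backend.keys(args) {
		// Each item is a `Result`; here we simply drop keys the backend failed to decode.
		Ok(iter) => iter.filter_map(|key| key.ok()).collect(),
		Err(_) => Vec::new(),
	}
}
```

Leaving `start_at_exclusive` at its default `false` keeps the previous inclusive behaviour; that default is what the rewritten `limit_remove_from_backend` in the ext.rs diff below relies on when it resumes from a returned cursor.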
49 changes: 9 additions & 40 deletions client/api/src/backend.rs
@@ -324,7 +324,6 @@ where
 {
	inner: <State as StateBackend<HashFor<Block>>>::RawIter,
	state: State,
-	skip_if_first: Option<StorageKey>,
 }
 
 impl<State, Block> KeysIter<State, Block>
@@ -341,13 +340,9 @@ where
		let mut args = IterArgs::default();
		args.prefix = prefix.as_ref().map(|prefix| prefix.0.as_slice());
		args.start_at = start_at.as_ref().map(|start_at| start_at.0.as_slice());
+		args.start_at_exclusive = true;
 
-		let start_at = args.start_at;
-		Ok(Self {
-			inner: state.raw_iter(args)?,
-			state,
-			skip_if_first: start_at.map(|key| StorageKey(key.to_vec())),
-		})
+		Ok(Self { inner: state.raw_iter(args)?, state })
	}
 
	/// Create a new iterator over a child storage's keys.
@@ -361,13 +356,9 @@ where
		args.prefix = prefix.as_ref().map(|prefix| prefix.0.as_slice());
		args.start_at = start_at.as_ref().map(|start_at| start_at.0.as_slice());
		args.child_info = Some(child_info);
+		args.start_at_exclusive = true;
 
-		let start_at = args.start_at;
-		Ok(Self {
-			inner: state.raw_iter(args)?,
-			state,
-			skip_if_first: start_at.map(|key| StorageKey(key.to_vec())),
-		})
+		Ok(Self { inner: state.raw_iter(args)?, state })
	}
 }

@@ -379,15 +370,7 @@ where
	type Item = StorageKey;
 
	fn next(&mut self) -> Option<Self::Item> {
-		let key = self.inner.next_key(&self.state)?.ok().map(StorageKey)?;
-
-		if let Some(skipped_key) = self.skip_if_first.take() {
-			if key == skipped_key {
-				return self.next()
-			}
-		}
-
-		Some(key)
+		self.inner.next_key(&self.state)?.ok().map(StorageKey)
	}
 }

@@ -399,7 +382,6 @@ where
 {
	inner: <State as StateBackend<HashFor<Block>>>::RawIter,
	state: State,
-	skip_if_first: Option<StorageKey>,
 }
 
 impl<State, Block> Iterator for PairsIter<State, Block>
@@ -410,19 +392,10 @@ where
	type Item = (StorageKey, StorageData);
 
	fn next(&mut self) -> Option<Self::Item> {
-		let (key, value) = self
-			.inner
+		self.inner
			.next_pair(&self.state)?
			.ok()
-			.map(|(key, value)| (StorageKey(key), StorageData(value)))?;
-
-		if let Some(skipped_key) = self.skip_if_first.take() {
-			if key == skipped_key {
-				return self.next()
-			}
-		}
-
-		Some((key, value))
+			.map(|(key, value)| (StorageKey(key), StorageData(value)))
	}
 }

@@ -440,13 +413,9 @@ where
		let mut args = IterArgs::default();
		args.prefix = prefix.as_ref().map(|prefix| prefix.0.as_slice());
		args.start_at = start_at.as_ref().map(|start_at| start_at.0.as_slice());
+		args.start_at_exclusive = true;
 
-		let start_at = args.start_at;
-		Ok(Self {
-			inner: state.raw_iter(args)?,
-			state,
-			skip_if_first: start_at.map(|key| StorageKey(key.to_vec())),
-		})
+		Ok(Self { inner: state.raw_iter(args)?, state })
	}
 }

120 changes: 15 additions & 105 deletions primitives/state-machine/src/backend.rs
@@ -43,6 +43,10 @@ pub struct IterArgs<'a> {
	/// This is inclusive and the iteration will include the key which is specified here.
	pub start_at: Option<&'a [u8]>,
 
+	/// If this is `true` then the iteration will *not* include
+	/// the key specified in `start_at`, if there is such a key.
+	pub start_at_exclusive: bool,
+
	/// The info of the child trie over which to iterate over.
	pub child_info: Option<ChildInfo>,

@@ -117,6 +121,17 @@ where
	}
 }
 
+impl<'a, H, I> PairsIter<'a, H, I>
+where
+	H: Hasher,
+	I: StorageIterator<H> + Default,
+{
+	#[cfg(feature = "std")]
+	pub(crate) fn was_complete(&self) -> bool {
+		self.raw_iter.was_complete()
+	}
+}
+
 /// An iterator over storage keys.
 pub struct KeysIter<'a, H, I>
 where
@@ -214,111 +229,6 @@ pub trait Backend<H: Hasher>: sp_std::fmt::Debug {
		key: &[u8],
	) -> Result<Option<StorageKey>, Self::Error>;
 
-	/// Iterate over storage starting at key, for a given prefix and child trie.
-	/// Aborts as soon as `f` returns false.
-	/// Warning, this fails at first error when usual iteration skips errors.
-	/// If `allow_missing` is true, iteration stops when it reaches a missing trie node.
-	/// Otherwise an error is produced.
-	///
-	/// Returns `true` if trie end is reached.
-	// TODO: Remove this.
-	fn apply_to_key_values_while<F: FnMut(Vec<u8>, Vec<u8>) -> bool>(
-		&self,
-		child_info: Option<&ChildInfo>,
-		prefix: Option<&[u8]>,
-		start_at: Option<&[u8]>,
-		mut f: F,
-		allow_missing: bool,
-	) -> Result<bool, Self::Error> {
-		let args = IterArgs {
-			child_info: child_info.cloned(),
-			prefix,
-			start_at,
-			stop_on_incomplete_database: allow_missing,
-			..IterArgs::default()
-		};
-		let mut iter = self.pairs(args)?;
-		while let Some(key_value) = iter.next() {
-			let (key, value) = key_value?;
-			if !f(key, value) {
-				return Ok(false)
-			}
-		}
-		Ok(iter.raw_iter.was_complete())
-	}
-
-	/// Retrieve all entries keys of storage and call `f` for each of those keys.
-	/// Aborts as soon as `f` returns false.
-	// TODO: Remove this.
-	fn apply_to_keys_while<F: FnMut(&[u8]) -> bool>(
-		&self,
-		child_info: Option<&ChildInfo>,
-		prefix: Option<&[u8]>,
-		start_at: Option<&[u8]>,
-		mut f: F,
-	) -> Result<(), Self::Error> {
-		let args =
-			IterArgs { child_info: child_info.cloned(), prefix, start_at, ..IterArgs::default() };
-
-		for key in self.keys(args)? {
-			if !f(&key?) {
-				return Ok(())
-			}
-		}
-		Ok(())
-	}
-
-	/// Retrieve all entries keys which start with the given prefix and
-	/// call `f` for each of those keys.
-	// TODO: Remove this.
-	fn for_keys_with_prefix<F: FnMut(&[u8])>(
-		&self,
-		prefix: &[u8],
-		mut f: F,
-	) -> Result<(), Self::Error> {
-		let args = IterArgs { prefix: Some(prefix), ..IterArgs::default() };
-		self.keys(args)?.try_for_each(|key| {
-			f(&key?);
-			Ok(())
-		})
-	}
-
-	/// Retrieve all entries keys and values of which start with the given prefix and
-	/// call `f` for each of those keys.
-	// TODO: Remove this.
-	fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(
-		&self,
-		prefix: &[u8],
-		mut f: F,
-	) -> Result<(), Self::Error> {
-		let args = IterArgs { prefix: Some(prefix), ..IterArgs::default() };
-		self.pairs(args)?.try_for_each(|key_value| {
-			let (key, value) = key_value?;
-			f(&key, &value);
-			Ok(())
-		})
-	}
-
-	/// Retrieve all child entries keys which start with the given prefix and
-	/// call `f` for each of those keys.
-	// TODO: Remove this.
-	fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
-		&self,
-		child_info: &ChildInfo,
-		prefix: &[u8],
-		mut f: F,
-	) -> Result<(), Self::Error> {
-		let args = IterArgs {
-			child_info: Some(child_info.clone()),
-			prefix: Some(prefix),
-			..IterArgs::default()
-		};
-		self.keys(args)?.try_for_each(|key| {
-			f(&key?);
-			Ok(())
-		})
-	}
-
	/// Calculate the storage root, with given delta over what is already stored in
	/// the backend, and produce a "transaction" that can be used to commit.
	/// Does not include child storage updates.
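As their default bodies above show, the removed helpers were thin wrappers around `keys`/`pairs`, so call sites can switch to the iterator API directly. Below is a sketch of the replacement pattern for the former `for_keys_with_prefix` and `apply_to_keys_while`; the free functions, their names and the generic bounds are illustrative, not part of the commit.

```rust
use sp_state_machine::{Backend, IterArgs};

// Former `for_keys_with_prefix(prefix, f)`: visit every key under a prefix.
fn visit_prefix<H, B>(backend: &B, prefix: &[u8], mut f: impl FnMut(&[u8])) -> Result<(), B::Error>
where
	H: hash_db::Hasher,
	B: Backend<H>,
{
	let args = IterArgs { prefix: Some(prefix), ..IterArgs::default() };
	for key in backend.keys(args)? {
		f(&key?);
	}
	Ok(())
}

// Former `apply_to_keys_while(...)`: stop early once the callback says so,
// by breaking out of the loop instead of returning `false` from a closure.
fn visit_while<H, B>(
	backend: &B,
	prefix: Option<&[u8]>,
	start_at: Option<&[u8]>,
	mut keep_going: impl FnMut(&[u8]) -> bool,
) -> Result<(), B::Error>
where
	H: hash_db::Hasher,
	B: Backend<H>,
{
	let args = IterArgs { prefix, start_at, ..IterArgs::default() };
	for key in backend.keys(args)? {
		if !keep_going(&key?) {
			break
		}
	}
	Ok(())
}
```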
74 changes: 45 additions & 29 deletions primitives/state-machine/src/ext.rs
@@ -19,7 +19,9 @@
 
 #[cfg(feature = "std")]
 use crate::overlayed_changes::OverlayedExtensions;
-use crate::{backend::Backend, IndexOperation, OverlayedChanges, StorageKey, StorageValue};
+use crate::{
+	backend::Backend, IndexOperation, IterArgs, OverlayedChanges, StorageKey, StorageValue,
+};
 use codec::{Decode, Encode, EncodeAppend};
 use hash_db::Hasher;
 #[cfg(feature = "std")]
@@ -750,40 +752,54 @@ where
 {
	fn limit_remove_from_backend(
		&mut self,
-		maybe_child: Option<&ChildInfo>,
-		maybe_prefix: Option<&[u8]>,
+		child_info: Option<&ChildInfo>,
+		prefix: Option<&[u8]>,
		maybe_limit: Option<u32>,
-		maybe_cursor: Option<&[u8]>,
+		start_at: Option<&[u8]>,
	) -> (Option<Vec<u8>>, u32, u32) {
+		let iter = match self.backend.keys(IterArgs {
+			child_info: child_info.cloned(),
+			prefix,
+			start_at,
+			..IterArgs::default()
+		}) {
+			Ok(iter) => iter,
+			Err(error) => {
+				log::debug!(target: "trie", "Error while iterating the storage: {}", error);
+				return (None, 0, 0)
+			},
+		};
+
		let mut delete_count: u32 = 0;
		let mut loop_count: u32 = 0;
		let mut maybe_next_key = None;
-		let result =
-			self.backend
-				.apply_to_keys_while(maybe_child, maybe_prefix, maybe_cursor, |key| {
-					if maybe_limit.map_or(false, |limit| loop_count == limit) {
-						maybe_next_key = Some(key.to_vec());
-						return false
-					}
-					let overlay = match maybe_child {
-						Some(child_info) => self.overlay.child_storage(child_info, key),
-						None => self.overlay.storage(key),
-					};
-					if !matches!(overlay, Some(None)) {
-						// not pending deletion from the backend - delete it.
-						if let Some(child_info) = maybe_child {
-							self.overlay.set_child_storage(child_info, key.to_vec(), None);
-						} else {
-							self.overlay.set_storage(key.to_vec(), None);
-						}
-						delete_count = delete_count.saturating_add(1);
-					}
-					loop_count = loop_count.saturating_add(1);
-					true
-				});
+		for key in iter {
+			let key = match key {
+				Ok(key) => key,
+				Err(error) => {
+					log::debug!(target: "trie", "Error while iterating the storage: {}", error);
+					break
+				},
+			};
 
-		if let Err(error) = result {
-			log::debug!(target: "trie", "Error while iterating the storage: {}", error);
+			if maybe_limit.map_or(false, |limit| loop_count == limit) {
+				maybe_next_key = Some(key);
+				break
+			}
+			let overlay = match child_info {
+				Some(child_info) => self.overlay.child_storage(child_info, &key),
+				None => self.overlay.storage(&key),
+			};
+			if !matches!(overlay, Some(None)) {
+				// not pending deletion from the backend - delete it.
+				if let Some(child_info) = child_info {
+					self.overlay.set_child_storage(child_info, key, None);
+				} else {
+					self.overlay.set_storage(key, None);
+				}
+				delete_count = delete_count.saturating_add(1);
+			}
+			loop_count = loop_count.saturating_add(1);
		}
 
		(maybe_next_key, delete_count, loop_count)
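The rewrite of `limit_remove_from_backend` keeps the old contract: walk the keys under `prefix`, mark at most `limit` of them as deleted in the overlay, and return the first unprocessed key so a later call can resume from it; because that key has not been handled yet, the method leaves `start_at` inclusive rather than setting `start_at_exclusive`. The following stand-alone sketch of the same loop uses only the public `Backend` API, with a plain `HashMap` standing in for the real overlay; everything other than `Backend`, `IterArgs` and `keys` is illustrative.

```rust
use std::collections::HashMap;

use sp_state_machine::{Backend, IterArgs};

/// Stand-in for the real overlay: a value of `None` means "already marked for deletion".
type Overlay = HashMap<Vec<u8>, Option<Vec<u8>>>;

/// Mark at most `limit` keys under `prefix` as deleted, returning the first
/// unprocessed key (the resume cursor) and how many keys were newly marked.
fn remove_batch<H, B>(
	backend: &B,
	overlay: &mut Overlay,
	prefix: Option<&[u8]>,
	start_at: Option<&[u8]>,
	limit: u32,
) -> (Option<Vec<u8>>, u32)
where
	H: hash_db::Hasher,
	B: Backend<H>,
{
	let args = IterArgs { prefix, start_at, ..IterArgs::default() };
	let Ok(iter) = backend.keys(args) else { return (None, 0) };

	let mut deleted = 0u32;
	let mut looped = 0u32;
	for key in iter {
		// Stop quietly on a backend error, as the real implementation does.
		let Ok(key) = key else { break };
		if looped == limit {
			// This key was not processed, so the next call must start at it (inclusively).
			return (Some(key), deleted);
		}
		if !matches!(overlay.get(&key), Some(None)) {
			// Not already pending deletion - mark it now.
			overlay.insert(key, None);
			deleted += 1;
		}
		looped += 1;
	}
	(None, deleted)
}
```

Feeding the returned key back in as `start_at` on the next call continues the scan exactly where it stopped.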