From 2a346c507f400c2d85bd2170edab757d70e69d52 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Sun, 21 Aug 2022 22:46:21 +0300 Subject: [PATCH 01/12] storage v0.4 is updated to cw1.0 this still needs to be tested in a real cw-v1.0 contract --- packages/storage/Readme.md | 285 ++++- packages/storage/src/append_store.rs | 801 ++++++++------ packages/storage/src/deque_store.rs | 830 +++++++-------- packages/storage/src/item.rs | 183 ++++ packages/storage/src/keymap.rs | 1432 ++++++++++++++++++++++++++ packages/storage/src/lib.rs | 11 +- packages/storage/src/typed_store.rs | 176 ---- 7 files changed, 2760 insertions(+), 958 deletions(-) create mode 100644 packages/storage/src/item.rs create mode 100644 packages/storage/src/keymap.rs delete mode 100644 packages/storage/src/typed_store.rs diff --git a/packages/storage/Readme.md b/packages/storage/Readme.md index b656e70..3d784cd 100644 --- a/packages/storage/Readme.md +++ b/packages/storage/Readme.md @@ -1,5 +1,286 @@ # Secret Contract Development Toolkit - Storage Tools -⚠️ This package is a sub-package of the `secret-toolkit` package. Please see its crate page for more context. +⚠️ This package is a cw v1.0 fork of the `secret-toolkit` package. Please see its crate page for more context. You need Rust 1.63+ to compile this package. -This package contains all the tools related to storage access patterns +This package contains many tools related to storage access patterns. This readme file assumes basic familiarity with basic cosmwasm storage, [click here to learn about this](https://docs.scrt.network/secret-network-documentation/development/secret-contracts/storage). 
+ +## **How to Import This Subpackage** + +To import this package, add one of the following lines to your `Cargo.toml` file + +```toml +secret-toolkit = { version = "0.4", default-features = false, features = ["utils", "storage", "serialization"] } +``` + +for the release versions (when it is updated to cosmwasm 1.0), or + +```toml +secret-toolkit = { git = "https://github.com/scrtlabs/secret-toolkit", branch = "cosmwasm-v1.0", default-features = false, features = ["utils", "storage", "serialization"]} +``` + +for the github version. We also import the `serialization` feature in case we want to switch to using Json instead of Bincode2 to serialize/deserialize data. + +## **Storage Objects** + +### **Item** + +This is the simplest storage object in this toolkit. It based on the similarly named Item from cosmwasm-storage-plus. Item allows the user to specify the type of the object being stored and the serialization/deserialization method used to store it (default being Bincode2). **One can think of the Item struct as a wrapper for the storage key.** Note that you want to use Json to serde an enum or any struct that stores an enum (except for the standard Option enum), because Bincode2 somehow uses floats during the deserialization of enums. This is why other cosmwasm chains don't use Bincode2 at all, however, you gain some performance when you can use it. + +#### **Initialize** + +This object is meant to be initialized as a static constant in `state.rs`. However, it would also work perfectly fine if it was initialized during run time with a variable key (in this case though, you'd have to remind it what type of object is stored and its serde). Import it using the following lines: + +```ignore +use secret_toolkit_storage::{Item} +``` + +And initialize it using the following lines: + +```ignore +pub static OWNER: Item = Item::new(b"owner"); +``` + +This uses Bincode2 to serde HumanAddr by default. 
To specify the Serde algorithm as Json, first import it from `secret-toolkit-serialization`
+
+```ignore
+use secret_toolkit::serialization::{Bincode2, Json};
+```
+
+then
+
+```ignore
+pub static SOME_ENUM: Item = Item::new(b"some_enum");
+```
+
+#### **Read/Write**
+
+The way to read/write to/from storage is to use its methods. These methods are `save`, `load`, `may_load`, `remove`, `update`. Here is an example use case for each in execution inside `contract.rs`:
+
+```ignore
+// The compiler knows that owner_addr is HumanAddr
+let owner_addr = OWNER.load(&deps.storage)?;
+```
+
+```ignore
+OWNER.save(&mut deps.storage, &env.message.sender)?;
+```
+
+```ignore
+// The compiler knows that may_addr is Option
+let may_addr = OWNER.may_load(&deps.storage)?;
+```
+
+```ignore
+// The compiler knows that may_addr is Option
+let may_addr = OWNER.remove(&mut deps.storage)?;
+```
+
+```ignore
+// The compiler knows that may_addr is Option
+let may_addr = OWNER.update(&mut deps.storage, |_x| Ok(env.message.sender))?;
+```
+
+### **AppendStore**
+
+AppendStore is meant to replicate the functionality of an append list in a cosmwasm efficient manner. The length of the list is stored and used to pop/push items to the list. It also has a method to create a read only iterator.
+
+This storage object also has the method `remove` to remove a stored object from an arbitrary position in the list, but this can be extremely inefficient.
+
+> ❗ Removing a storage object further from the tail gets increasingly inefficient. We recommend you use `pop` and `push` whenever possible.
+
+The same conventions from `Item` also apply here, that is:
+
+1. AppendStore has to be told the type of the stored objects. And the serde optionally.
+2. Every method needs its own reference to `deps.storage`.
+ +#### **Initialize** + +To import and intialize this storage object as a static constant in `state.rs`, do the following: + +```ignore +use secret_toolkit::storage::{AppendStore} +``` + +```ignore +pub static COUNT_STORE: AppendStore = AppendStore::new(b"count"); +``` + +> ❗ Initializing the object as const instead of static will also work but be less efficient since the variable won't be able to cache length data. + +Often times we need these storage objects to be associated to a user address or some other key that is variable. In this case, you need not initialize a completely new AppendStore inside `contract.rs`. Instead, you can create a new AppendStore by adding a suffix to an already existing AppendStore. This has the benefit of preventing you from having to rewrite the signature of the AppendStore. For example + +```ignore +// The compiler knows that user_count_store is AppendStore +let user_count_store = COUNT_STORE.add_suffix(env.message.sender.to_string().as_bytes()); +``` + +#### **Read/Write** + +The main user facing methods to read/write to AppendStore are `pop`, `push`, `get_len`, `set_at` (which replaces data at a position within the length bound), `clear` (which deletes all data in the storage), `remove` (which removes an item in an arbitrary position, this is very inefficient). An extensive list of examples of these being used can be found inside the unit tests of AppendStore found in `append_store.rs`. + +#### **Iterator** + +AppendStore also implements a readonly iterator feature. This feature is also used to create a paging wrapper method called `paging`. The way you create the iterator is: + +```ignore +let iter = user_count_store.iter(&deps.storage)?; +``` + +More examples can be found in the unit tests. 
And the paging wrapper is used in the following manner: + +```ignore +let start_page: u32 = 0; +let page_size: u32 = 5; +// The compiler knows that values is Vec +let values = user_count_store.paging(&deps.storage, start_page, page_size)?; +``` + +### **DequeStore** + +This is a storage wrapper based on AppendStore that replicates a double ended list. This storage object allows the user to efficiently pop/push items to either end of the list. + +#### **Init** + +To import and intialize this storage object as a static constant in `state.rs`, do the following: + +```ignore +use secret_toolkit::storage::{DequeStore} +``` + +```ignore +pub static COUNT_STORE: DequeStore = DequeStore::new(b"count"); +``` + +> ❗ Initializing the object as const instead of static will also work but be less efficient since the variable won't be able to cache length data. + +#### **Read/Write** + +The main user facing methods to read/write to DequeStore are `pop_back`, `pop_front`, `push_back`, `push_front`, `get_len`, `get_off`, `set_at` (which replaces data at a position within the length bound), `clear` (which deletes all data in the storage), `remove` (which removes an item in an arbitrary position, this is very inefficient). An extensive list of examples of these being used can be found inside the unit tests of DequeStore found in `deque_store.rs`. + +#### **Iterator** + +This is exactly same as that of AppendStore. + +### **Keymap** + +This hashmap-like storage structure allows the user to use generic typed keys to store objects. Allows iteration with paging over keys and/or items (without guaranteed ordering, although the order of insertion is preserved until you start removing objects). +An example use-case for such a structure is if you want to contain a large amount of votes, deposits, or bets and iterate over them at some time in the future. 
+
+Since iterating over large amounts of data at once may be prohibitive, this structure allows you to specify the amount of data that will
+be returned in each page.
+
+#### **Init**
+
+To import and initialize this storage object as a static constant in `state.rs`, do the following:
+
+```ignore
+use secret_toolkit::storage::{Keymap}
+```
+
+```ignore
+pub static ADDR_VOTE: Keymap = Keymap::new(b"vote");
+pub static BET_STORE: Keymap = Keymap::new(b"bet");
+```
+
+> ❗ Initializing the object as const instead of static will also work but be less efficient since the variable won't be able to cache length data.
+
+You can use Json serde algorithm by changing the signature to `Keymap`, similar to all the other storage objects above. However, keep in mind that the Serde algorithm is used to serde both the stored object (`Uint128`) AND the key (`HumanAddr`).
+
+If you need to associate a keymap to a user address (or any other variable), then you can also do this using the `.add_suffix` method.
+
+For example, suppose that in your contract, a user can make multiple bets. Then, you'd want a Keymap to be associated to each user. You would achieve this by doing the following during execution in `contract.rs`.
+
+```ignore
+// The compiler knows that user_bet_store is Keymap
+let user_bet_store = BET_STORE.add_suffix(env.message.sender.to_string().as_bytes());
+```
+
+#### **Read/Write**
+
+You can find more examples of using keymaps in the unit tests of Keymap in `keymap.rs`.
+
+To insert, remove, read from the keymap, do the following:
+
+```ignore
+let user_addr: HumanAddr = env.message.sender;
+
+let foo = Foo {
+    message: "string one".to_string(),
+    votes: 1111,
+};
+
+ADDR_VOTE.insert(&mut deps.storage, &user_addr, &foo)?;
+// Compiler knows that this is Foo
+let read_foo = ADDR_VOTE.get(&deps.storage, &user_addr).unwrap();
+assert_eq!(read_foo, foo);
+ADDR_VOTE.remove(&mut deps.storage, &user_addr)?;
+assert_eq!(ADDR_VOTE.get_len(&deps.storage)?, 0);
+```
+
+#### **Iterator**
+
+There are two methods that create an iterator in Keymap. These are `.iter` and `.iter_keys`. `iter_keys` only iterates over the keys whereas `iter` iterates over (key, item) pairs. Needless to say, `.iter_keys` is more efficient as it does not attempt to read the item.
+
+Keymap also has two paging methods, these are `.paging` and `.paging_keys`. `paging_keys` only paginates keys whereas `paging` paginates (key, item) pairs. Needless to say, `.paging_keys` is more efficient as it does not attempt to read the item.
+ +Here are some select examples from the unit tests: + +```ignore +fn test_keymap_iter_keys() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string two".to_string(), + number: 1111, + }; + + let key1 = "key1".to_string(); + let key2 = "key2".to_string(); + + keymap.insert(&mut storage, &key1, &foo1)?; + keymap.insert(&mut storage, &key2, &foo2)?; + + let mut x = keymap.iter_keys(&storage)?; + let (len, _) = x.size_hint(); + assert_eq!(len, 2); + + assert_eq!(x.next().unwrap()?, key1); + + assert_eq!(x.next().unwrap()?, key2); + + Ok(()) +} +``` + +```ignore +fn test_keymap_iter() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap, Foo> = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string two".to_string(), + number: 1111, + }; + + keymap.insert(&mut storage, &b"key1".to_vec(), &foo1)?; + keymap.insert(&mut storage, &b"key2".to_vec(), &foo2)?; + + let mut x = keymap.iter(&storage)?; + let (len, _) = x.size_hint(); + assert_eq!(len, 2); + + assert_eq!(x.next().unwrap()?.1, foo1); + + assert_eq!(x.next().unwrap()?.1, foo2); + + Ok(()) +} +``` diff --git a/packages/storage/src/append_store.rs b/packages/storage/src/append_store.rs index 6354555..0e5f793 100644 --- a/packages/storage/src/append_store.rs +++ b/packages/storage/src/append_store.rs @@ -3,8 +3,10 @@ //! //! This is achieved by storing each item in a separate storage entry. A special key is reserved //! for storing the length of the collection so far. 
+use std::any::type_name; use std::convert::TryInto; use std::marker::PhantomData; +use std::sync::Mutex; use serde::{de::DeserializeOwned, Serialize}; @@ -14,354 +16,260 @@ use secret_toolkit_serialization::{Bincode2, Serde}; const LEN_KEY: &[u8] = b"len"; -// Mutable append-store - -/// A type allowing both reads from and writes to the append store at a given storage location. -pub struct AppendStoreMut<'a, T, Ser = Bincode2> +pub struct AppendStore<'a, T, Ser = Bincode2> where T: Serialize + DeserializeOwned, Ser: Serde, { - storage: &'a mut dyn Storage, - item_type: PhantomData<*const T>, - serialization_type: PhantomData<*const Ser>, - len: u32, + /// prefix of the newly constructed Storage + namespace: &'a [u8], + /// needed if any suffixes were added to the original namespace. + /// therefore it is not necessarily same as the namespace. + prefix: Option>, + length: Mutex>, + item_type: PhantomData, + serialization_type: PhantomData, } -impl<'a, T> AppendStoreMut<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - /// Try to use the provided storage as an AppendStore. If it doesn't seem to be one, then - /// initialize it as one. - /// - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach_or_create(storage: &'a mut dyn Storage) -> StdResult { - AppendStoreMut::attach_or_create_with_serialization(storage, Bincode2) - } - - /// Try to use the provided storage as an AppendStore. - /// - /// Returns None if the provided storage doesn't seem like an AppendStore. - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach(storage: &'a mut dyn Storage) -> Option> { - AppendStoreMut::attach_with_serialization(storage, Bincode2) - } -} - -impl<'a, T, Ser> AppendStoreMut<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - /// Try to use the provided storage as an AppendStore. If it doesn't seem to be one, then - /// initialize it as one. 
This method allows choosing the serialization format you want to use. - /// - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach_or_create_with_serialization( - storage: &'a mut dyn Storage, - _ser: Ser, - ) -> StdResult { - if let Some(len_vec) = storage.get(LEN_KEY) { - Self::new(storage, &len_vec) - } else { - let len_vec = 0_u32.to_be_bytes(); - storage.set(LEN_KEY, &len_vec); - Self::new(storage, &len_vec) - } - } - - /// Try to use the provided storage as an AppendStore. - /// This method allows choosing the serialization format you want to use. - /// - /// Returns None if the provided storage doesn't seem like an AppendStore. - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach_with_serialization( - storage: &'a mut dyn Storage, - _ser: Ser, - ) -> Option> { - let len_vec = storage.get(LEN_KEY)?; - Some(Self::new(storage, &len_vec)) - } - - fn new(storage: &'a mut dyn Storage, len_vec: &[u8]) -> StdResult { - let len_array = len_vec - .try_into() - .map_err(|err| StdError::parse_err("u32", err))?; - let len = u32::from_be_bytes(len_array); - - Ok(Self { - storage, +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { + /// constructor + pub const fn new(prefix: &'a [u8]) -> Self { + Self { + namespace: prefix, + prefix: None, + length: Mutex::new(None), item_type: PhantomData, serialization_type: PhantomData, - len, - }) + } } - - pub fn len(&self) -> u32 { - self.len + /// This is used to produce a new AppendListStorage. 
This can be used when you want to associate an AppendListStorage to each user + /// and you still get to define the AppendListStorage as a static constant + pub fn add_suffix(&self, suffix: &[u8]) -> Self { + let prefix = if let Some(prefix) = &self.prefix { + [prefix.clone(), suffix.to_vec()].concat() + } else { + [self.namespace.to_vec(), suffix.to_vec()].concat() + }; + Self { + namespace: self.namespace, + prefix: Some(prefix), + length: Mutex::new(None), + item_type: self.item_type, + serialization_type: self.serialization_type, + } } +} - pub fn is_empty(&self) -> bool { - self.len == 0 +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { + /// gets the length from storage, and otherwise sets it to 0 + pub fn get_len(&self, storage: &S) -> StdResult { + let mut may_len = self.length.lock().unwrap(); + match *may_len { + Some(len) => Ok(len), + None => { + let len_key = [self.as_slice(), LEN_KEY].concat(); + if let Some(len_vec) = storage.get(&len_key) { + let len_bytes = len_vec + .as_slice() + .try_into() + .map_err(|err| StdError::parse_err("u32", err))?; + let len = u32::from_be_bytes(len_bytes); + *may_len = Some(len); + Ok(len) + } else { + *may_len = Some(0); + Ok(0) + } + } + } } - - pub fn storage(&mut self) -> &mut dyn Storage { - self.storage + /// checks if the collection has any elements + pub fn is_empty(&self, storage: &S) -> StdResult { + Ok(self.get_len(storage)? 
== 0) } - - pub fn readonly_storage(&self) -> &dyn Storage { - self.storage + /// gets the element at pos if within bounds + pub fn get_at(&self, storage: &S, pos: u32) -> StdResult { + let len = self.get_len(storage)?; + if pos > len { + return Err(StdError::generic_err("AppendStore access out of bounds")); + } + self.get_at_unchecked(storage, pos) } - - /// Return an iterator over the items in the collection - pub fn iter(&self) -> Iter { - self.as_readonly().iter() + /// tries to get the element at pos + fn get_at_unchecked(&self, storage: &S, pos: u32) -> StdResult { + let key = pos.to_be_bytes(); + self.load_impl(storage, &key) } - /// Get the value stored at a given position. - /// - /// # Errors - /// Will return an error if pos is out of bounds or if an item is not found. - pub fn get_at(&self, pos: u32) -> StdResult { - self.as_readonly().get_at(pos) - } + /// Set the length of the collection + fn set_len(&self, storage: &mut S, len: u32) { + let len_key = [self.as_slice(), LEN_KEY].concat(); + storage.set(&len_key, &len.to_be_bytes()); - fn get_at_unchecked(&self, pos: u32) -> StdResult { - self.as_readonly().get_at_unchecked(pos) + let mut may_len = self.length.lock().unwrap(); + *may_len = Some(len); } - - /// Set the value of the item stored at a given position. 
- /// - /// # Errors - /// Will return an error if the position is out of bounds - pub fn set_at(&mut self, pos: u32, item: &T) -> StdResult<()> { - if pos >= self.len { - return Err(StdError::generic_err("AppendStorage access out of bounds")); + /// Clear the collection + pub fn clear(&self, storage: &mut S) { + self.set_len(storage, 0); + } + /// Replaces data at a position within bounds + pub fn set_at(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { + let len = self.get_len(storage)?; + if pos >= len { + return Err(StdError::generic_err("AppendStore access out of bounds")); } - self.set_at_unchecked(pos, item) + self.set_at_unchecked(storage, pos, item) } - - fn set_at_unchecked(&mut self, pos: u32, item: &T) -> StdResult<()> { - let serialized = Ser::serialize(item)?; - self.storage.set(&pos.to_be_bytes(), &serialized); - Ok(()) + /// Sets data at a given index + fn set_at_unchecked(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { + self.save_impl(storage, &pos.to_be_bytes(), item) } - - /// Append an item to the end of the collection. - /// - /// This operation has a constant cost. 
- pub fn push(&mut self, item: &T) -> StdResult<()> { - self.set_at_unchecked(self.len, item)?; - self.set_length(self.len + 1); + /// Pushes an item to AppendStorage + pub fn push(&self, storage: &mut S, item: &T) -> StdResult<()> { + let len = self.get_len(storage)?; + self.set_at_unchecked(storage, len, item)?; + self.set_len(storage, len + 1); Ok(()) } - - /// Pop the last item off the collection - pub fn pop(&mut self) -> StdResult { - if let Some(len) = self.len.checked_sub(1) { - let item = self.get_at_unchecked(len); - self.set_length(len); + /// Pops an item from AppendStore + pub fn pop(&self, storage: &mut S) -> StdResult { + if let Some(len) = self.get_len(storage)?.checked_sub(1) { + let item = self.get_at_unchecked(storage, len); + self.set_len(storage, len); item } else { Err(StdError::generic_err("Can not pop from empty AppendStore")) } } + /// Remove an element from the collection at the specified position. + /// + /// Removing the last element has a constant cost. + /// The cost of removing from the middle/start will depend on the proximity to tail of the list. + /// All elements above the specified position will be shifted in storage. + /// + /// Removing an element from the start (head) of the collection + /// has the worst runtime and gas cost. 
+ pub fn remove(&self, storage: &mut S, pos: u32) -> StdResult { + let len = self.get_len(storage)?; - /// Clear the collection - pub fn clear(&mut self) { - self.set_length(0); - } - - /// Set the length of the collection - fn set_length(&mut self, len: u32) { - self.storage.set(LEN_KEY, &len.to_be_bytes()); - self.len = len; - } - - /// Gain access to the implementation of the immutable methods - fn as_readonly(&self) -> AppendStore { - AppendStore { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - len: self.len, + if pos >= len { + return Err(StdError::generic_err("DequeStorage access out of bounds")); } - } -} - -// Doing this is fundamentally flawed because it would theoretically permanently turn the `&mut S` -// into a `&S`, preventing any further mutation of the entire storage. -// In practice this just gave annoying lifetime errors either here or at `AppendStoreMut::as_readonly`. -/* -impl<'a, T, S> IntoIterator for AppendStoreMut<'a, T, S> -where - T: 'a + Serialize + DeserializeOwned, - S: Storage, -{ - type Item = StdResult; - type IntoIter = Iter<'a, T, S>; + let item = self.get_at_unchecked(storage, pos); - fn into_iter(self) -> Iter<'a, T, S> { - Iter { - storage: self.as_readonly(), - start: 0, - end: self.len, + for i in pos..(len - 1) { + let element_to_shift = self.get_at_unchecked(storage, i + 1)?; + self.set_at_unchecked(storage, i, &element_to_shift)?; } + self.set_len(storage, len - 1); + item + } + /// Returns a readonly iterator + pub fn iter(&self, storage: &'a S) -> StdResult> { + let len = self.get_len(storage)?; + let iter = AppendStoreIter::new(self, storage, 0, len); + Ok(iter) + } + /// does paging with the given parameters + pub fn paging(&self, storage: &S, start_page: u32, size: u32) -> StdResult> { + self.iter(storage)? 
+ .skip((start_page as usize) * (size as usize)) + .take(size as usize) + .collect() } } -*/ - -// Readonly append-store - -/// A type allowing only reads from an append store. useful in the context_, u8 of queries. -pub struct AppendStore<'a, T, Ser = Bincode2> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: &'a dyn Storage, - item_type: PhantomData<*const T>, - serialization_type: PhantomData<*const Ser>, - len: u32, -} -impl<'a, T> AppendStore<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - /// Try to use the provided storage as an AppendStore. - /// - /// Returns None if the provided storage doesn't seem like an AppendStore. - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach(storage: &'a dyn Storage) -> Option> { - AppendStore::attach_with_serialization(storage, Bincode2) +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for AppendStore<'a, T, Ser> { + fn clone(&self) -> Self { + Self { + namespace: self.namespace.clone(), + prefix: self.prefix.clone(), + length: Mutex::new(None), + item_type: self.item_type.clone(), + serialization_type: self.serialization_type.clone(), + } } } -impl<'a, T, Ser> AppendStore<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - /// Try to use the provided storage as an AppendStore. - /// This method allows choosing the serialization format you want to use. - /// - /// Returns None if the provided storage doesn't seem like an AppendStore. - /// Returns Err if the contents of the storage can not be parsed. 
- pub fn attach_with_serialization( - storage: &'a dyn Storage, - _ser: Ser, - ) -> Option> { - let len_vec = storage.get(LEN_KEY)?; - Some(AppendStore::new(storage, len_vec)) - } - - fn new(storage: &'a dyn Storage, len_vec: Vec) -> StdResult { - let len_array = len_vec - .as_slice() - .try_into() - .map_err(|err| StdError::parse_err("u32", err))?; - let len = u32::from_be_bytes(len_array); - - Ok(Self { - storage, - item_type: PhantomData, - serialization_type: PhantomData, - len, - }) - } - - pub fn len(&self) -> u32 { - self.len - } - - pub fn is_empty(&self) -> bool { - self.len == 0 - } - - pub fn readonly_storage(&self) -> &dyn Storage { - self.storage - } - - /// Return an iterator over the items in the collection - pub fn iter(&self) -> Iter<'a, T, Ser> { - Iter { - storage: AppendStore::clone(self), - start: 0, - end: self.len, +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { + fn as_slice(&self) -> &[u8] { + if let Some(prefix) = &self.prefix { + prefix + } else { + self.namespace } } - /// Get the value stored at a given position. + /// Returns StdResult from retrieving the item with the specified key. Returns a + /// StdError::NotFound if there is no item with that key /// - /// # Errors - /// Will return an error if pos is out of bounds or if an item is not found. 
- pub fn get_at(&self, pos: u32) -> StdResult { - if pos >= self.len { - return Err(StdError::generic_err("AppendStorage access out of bounds")); - } - self.get_at_unchecked(pos) - } - - fn get_at_unchecked(&self, pos: u32) -> StdResult { - let serialized = self.storage.get(&pos.to_be_bytes()).ok_or_else(|| { - StdError::generic_err(format!("No item in AppendStorage at position {}", pos)) - })?; - Ser::deserialize(&serialized) + /// # Arguments + /// + /// * `storage` - a reference to the storage this item is in + /// * `key` - a byte slice representing the key to access the stored item + fn load_impl(&self, storage: &S, key: &[u8]) -> StdResult { + let prefixed_key = [self.as_slice(), key].concat(); + Ser::deserialize( + &storage + .get(&prefixed_key) + .ok_or(StdError::not_found(type_name::()))?, + ) + } + + /// Returns StdResult<()> resulting from saving an item to storage + /// + /// # Arguments + /// + /// * `storage` - a mutable reference to the storage this item should go to + /// * `key` - a byte slice representing the key to access the stored item + /// * `value` - a reference to the item to store + fn save_impl(&self, storage: &mut S, key: &[u8], value: &T) -> StdResult<()> { + let prefixed_key = [self.as_slice(), key].concat(); + storage.set(&prefixed_key, &Ser::serialize(value)?); + Ok(()) } } -impl<'a, T, Ser> IntoIterator for AppendStore<'a, T, Ser> +/// An iterator over the contents of the append store. +pub struct AppendStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { - type Item = StdResult; - type IntoIter = Iter<'a, T, Ser>; - - fn into_iter(self) -> Iter<'a, T, Ser> { - let end = self.len; - Iter { - storage: self, - start: 0, - end, - } - } + append_store: &'a AppendStore<'a, T, Ser>, + storage: &'a S, + start: u32, + end: u32, } -// Manual `Clone` implementation because the default one tries to clone the Storage?? 
-impl<'a, T, Ser> Clone for AppendStore<'a, T, Ser> +impl<'a, T, S, Ser> AppendStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { - fn clone(&self) -> Self { + /// constructor + pub fn new( + append_store: &'a AppendStore<'a, T, Ser>, + storage: &'a S, + start: u32, + end: u32, + ) -> Self { Self { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - len: self.len, + append_store, + storage, + start, + end, } } } -// Owning iterator - -/// An iterator over the contents of the append store. -pub struct Iter<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: AppendStore<'a, T, Ser>, - start: u32, - end: u32, -} - -impl<'a, T, Ser> Iterator for Iter<'a, T, Ser> +impl<'a, T, S, Ser> Iterator for AppendStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { type Item = StdResult; @@ -370,7 +278,7 @@ where if self.start >= self.end { return None; } - let item = self.storage.get_at(self.start); + let item = self.append_store.get_at(self.storage, self.start); self.start += 1; Some(item) } @@ -393,9 +301,10 @@ where } } -impl<'a, T, Ser> DoubleEndedIterator for Iter<'a, T, Ser> +impl<'a, T, S, Ser> DoubleEndedIterator for AppendStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { fn next_back(&mut self) -> Option { @@ -403,7 +312,7 @@ where return None; } self.end -= 1; - let item = self.storage.get_at(self.end); + let item = self.append_store.get_at(self.storage, self.end); Some(item) } @@ -420,9 +329,10 @@ where } // This enables writing `append_store.iter().skip(n).rev()` -impl<'a, T, Ser> ExactSizeIterator for Iter<'a, T, Ser> +impl<'a, T, S, Ser> ExactSizeIterator for AppendStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { } @@ -438,17 +348,50 @@ mod tests { #[test] fn test_push_pop() -> StdResult<()> { let mut storage = 
MockStorage::new(); - let mut append_store = AppendStoreMut::attach_or_create(&mut storage)?; - append_store.push(&1234)?; - append_store.push(&2143)?; - append_store.push(&3412)?; - append_store.push(&4321)?; - - assert_eq!(append_store.pop(), Ok(4321)); - assert_eq!(append_store.pop(), Ok(3412)); - assert_eq!(append_store.pop(), Ok(2143)); - assert_eq!(append_store.pop(), Ok(1234)); - assert!(append_store.pop().is_err()); + let append_store: AppendStore = AppendStore::new(b"test"); + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; + + assert_eq!(append_store.pop(&mut storage), Ok(4321)); + assert_eq!(append_store.pop(&mut storage), Ok(3412)); + assert_eq!(append_store.pop(&mut storage), Ok(2143)); + assert_eq!(append_store.pop(&mut storage), Ok(1234)); + assert!(append_store.pop(&mut storage).is_err()); + + Ok(()) + } + + #[test] + fn test_length() -> StdResult<()> { + let mut storage = MockStorage::new(); + let append_store: AppendStore = AppendStore::new(b"test"); + + assert!(append_store.length.lock().unwrap().eq(&None)); + assert_eq!(append_store.get_len(&mut storage)?, 0); + assert!(append_store.length.lock().unwrap().eq(&Some(0))); + + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; + assert!(append_store.length.lock().unwrap().eq(&Some(4))); + assert_eq!(append_store.get_len(&mut storage)?, 4); + + assert_eq!(append_store.pop(&mut storage), Ok(4321)); + assert_eq!(append_store.pop(&mut storage), Ok(3412)); + assert!(append_store.length.lock().unwrap().eq(&Some(2))); + assert_eq!(append_store.get_len(&mut storage)?, 2); + + assert_eq!(append_store.pop(&mut storage), Ok(2143)); + assert_eq!(append_store.pop(&mut storage), Ok(1234)); + assert!(append_store.length.lock().unwrap().eq(&Some(0))); + 
assert_eq!(append_store.get_len(&mut storage)?, 0); + + assert!(append_store.pop(&mut storage).is_err()); + assert!(append_store.length.lock().unwrap().eq(&Some(0))); + assert_eq!(append_store.get_len(&mut storage)?, 0); Ok(()) } @@ -456,21 +399,21 @@ mod tests { #[test] fn test_iterator() -> StdResult<()> { let mut storage = MockStorage::new(); - let mut append_store = AppendStoreMut::attach_or_create(&mut storage)?; - append_store.push(&1234)?; - append_store.push(&2143)?; - append_store.push(&3412)?; - append_store.push(&4321)?; + let append_store: AppendStore = AppendStore::new(b"test"); + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; // iterate twice to make sure nothing changed - let mut iter = append_store.iter(); + let mut iter = append_store.iter(&storage)?; assert_eq!(iter.next(), Some(Ok(1234))); assert_eq!(iter.next(), Some(Ok(2143))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), None); - let mut iter = append_store.iter(); + let mut iter = append_store.iter(&storage)?; assert_eq!(iter.next(), Some(Ok(1234))); assert_eq!(iter.next(), Some(Ok(2143))); assert_eq!(iter.next(), Some(Ok(3412))); @@ -478,7 +421,7 @@ mod tests { assert_eq!(iter.next(), None); // make sure our implementation of `nth` doesn't break anything - let mut iter = append_store.iter().skip(2); + let mut iter = append_store.iter(&storage)?.skip(2); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), None); @@ -489,13 +432,13 @@ mod tests { #[test] fn test_reverse_iterator() -> StdResult<()> { let mut storage = MockStorage::new(); - let mut append_store = AppendStoreMut::attach_or_create(&mut storage)?; - append_store.push(&1234)?; - append_store.push(&2143)?; - append_store.push(&3412)?; - append_store.push(&4321)?; + let append_store: AppendStore = 
AppendStore::new(b"test"); + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; - let mut iter = append_store.iter().rev(); + let mut iter = append_store.iter(&storage)?.rev(); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(2143))); @@ -503,7 +446,7 @@ mod tests { assert_eq!(iter.next(), None); // iterate twice to make sure nothing changed - let mut iter = append_store.iter().rev(); + let mut iter = append_store.iter(&storage)?.rev(); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(2143))); @@ -511,13 +454,13 @@ mod tests { assert_eq!(iter.next(), None); // make sure our implementation of `nth_back` doesn't break anything - let mut iter = append_store.iter().rev().skip(2); + let mut iter = append_store.iter(&storage)?.rev().skip(2); assert_eq!(iter.next(), Some(Ok(2143))); assert_eq!(iter.next(), Some(Ok(1234))); assert_eq!(iter.next(), None); // make sure our implementation of `ExactSizeIterator` works well - let mut iter = append_store.iter().skip(2).rev(); + let mut iter = append_store.iter(&storage)?.skip(2).rev(); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), None); @@ -526,10 +469,121 @@ mod tests { } #[test] - fn test_attach_to_wrong_location() { + fn test_json_push_pop() -> StdResult<()> { + let mut storage = MockStorage::new(); + let append_store: AppendStore = AppendStore::new(b"test"); + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; + + assert_eq!(append_store.pop(&mut storage), Ok(4321)); + assert_eq!(append_store.pop(&mut storage), Ok(3412)); + assert_eq!(append_store.pop(&mut storage), Ok(2143)); + 
assert_eq!(append_store.pop(&mut storage), Ok(1234)); + assert!(append_store.pop(&mut storage).is_err()); + + Ok(()) + } + + #[test] + fn test_suffixed_pop() -> StdResult<()> { let mut storage = MockStorage::new(); - assert!(AppendStore::::attach(&storage).is_none()); - assert!(AppendStoreMut::::attach(&mut storage).is_none()); + let suffix: &[u8] = b"test_suffix"; + let original_store: AppendStore = AppendStore::new(b"test"); + let append_store = original_store.add_suffix(suffix); + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; + + assert_eq!(append_store.pop(&mut storage), Ok(4321)); + assert_eq!(append_store.pop(&mut storage), Ok(3412)); + assert_eq!(append_store.pop(&mut storage), Ok(2143)); + assert_eq!(append_store.pop(&mut storage), Ok(1234)); + assert!(append_store.pop(&mut storage).is_err()); + + Ok(()) + } + + #[test] + fn test_suffixed_reverse_iter() -> StdResult<()> { + let mut storage = MockStorage::new(); + let suffix: &[u8] = b"test_suffix"; + let original_store: AppendStore = AppendStore::new(b"test"); + let append_store = original_store.add_suffix(suffix); + + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; + + assert_eq!(original_store.get_len(&storage)?, 0); + + let mut iter = append_store.iter(&storage)?.rev(); + assert_eq!(iter.next(), Some(Ok(4321))); + assert_eq!(iter.next(), Some(Ok(3412))); + assert_eq!(iter.next(), Some(Ok(2143))); + assert_eq!(iter.next(), Some(Ok(1234))); + assert_eq!(iter.next(), None); + + // iterate twice to make sure nothing changed + let mut iter = append_store.iter(&storage)?.rev(); + assert_eq!(iter.next(), Some(Ok(4321))); + assert_eq!(iter.next(), Some(Ok(3412))); + assert_eq!(iter.next(), Some(Ok(2143))); + assert_eq!(iter.next(), Some(Ok(1234))); + 
assert_eq!(iter.next(), None); + + // make sure our implementation of `nth_back` doesn't break anything + let mut iter = append_store.iter(&storage)?.rev().skip(2); + assert_eq!(iter.next(), Some(Ok(2143))); + assert_eq!(iter.next(), Some(Ok(1234))); + assert_eq!(iter.next(), None); + + // make sure our implementation of `ExactSizeIterator` works well + let mut iter = append_store.iter(&storage)?.skip(2).rev(); + assert_eq!(iter.next(), Some(Ok(4321))); + assert_eq!(iter.next(), Some(Ok(3412))); + assert_eq!(iter.next(), None); + + Ok(()) + } + + #[test] + fn test_suffix_iter() -> StdResult<()> { + let mut storage = MockStorage::new(); + let suffix: &[u8] = b"test_suffix"; + let original_store: AppendStore = AppendStore::new(b"test"); + let append_store = original_store.add_suffix(suffix); + + append_store.push(&mut storage, &1234)?; + append_store.push(&mut storage, &2143)?; + append_store.push(&mut storage, &3412)?; + append_store.push(&mut storage, &4321)?; + + // iterate twice to make sure nothing changed + let mut iter = append_store.iter(&storage)?; + assert_eq!(iter.next(), Some(Ok(1234))); + assert_eq!(iter.next(), Some(Ok(2143))); + assert_eq!(iter.next(), Some(Ok(3412))); + assert_eq!(iter.next(), Some(Ok(4321))); + assert_eq!(iter.next(), None); + + let mut iter = append_store.iter(&storage)?; + assert_eq!(iter.next(), Some(Ok(1234))); + assert_eq!(iter.next(), Some(Ok(2143))); + assert_eq!(iter.next(), Some(Ok(3412))); + assert_eq!(iter.next(), Some(Ok(4321))); + assert_eq!(iter.next(), None); + + // make sure our implementation of `nth` doesn't break anything + let mut iter = append_store.iter(&storage)?.skip(2); + assert_eq!(iter.next(), Some(Ok(3412))); + assert_eq!(iter.next(), Some(Ok(4321))); + assert_eq!(iter.next(), None); + + Ok(()) } #[test] @@ -537,20 +591,111 @@ mod tests { // Check the default behavior is Bincode2 let mut storage = MockStorage::new(); - let mut append_store = AppendStoreMut::attach_or_create(&mut storage)?; - 
append_store.push(&1234)?; + let append_store: AppendStore = AppendStore::new(b"test"); + append_store.push(&mut storage, &1234)?; - let bytes = append_store.readonly_storage().get(&0_u32.to_be_bytes()); + let key = [append_store.as_slice(), &0_u32.to_be_bytes()].concat(); + let bytes = storage.get(&key); assert_eq!(bytes, Some(vec![210, 4, 0, 0])); // Check that overriding the serializer with Json works let mut storage = MockStorage::new(); - let mut append_store = - AppendStoreMut::attach_or_create_with_serialization(&mut storage, Json)?; - append_store.push(&1234)?; - let bytes = append_store.readonly_storage().get(&0_u32.to_be_bytes()); + let json_append_store: AppendStore = AppendStore::new(b"test2"); + json_append_store.push(&mut storage, &1234)?; + + let key = [json_append_store.as_slice(), &0_u32.to_be_bytes()].concat(); + let bytes = storage.get(&key); assert_eq!(bytes, Some(b"1234".to_vec())); Ok(()) } + + #[test] + fn test_removes() -> StdResult<()> { + let mut storage = MockStorage::new(); + let deque_store: AppendStore = AppendStore::new(b"test"); + deque_store.push(&mut storage, &1)?; + deque_store.push(&mut storage, &2)?; + deque_store.push(&mut storage, &3)?; + deque_store.push(&mut storage, &4)?; + deque_store.push(&mut storage, &5)?; + deque_store.push(&mut storage, &6)?; + deque_store.push(&mut storage, &7)?; + deque_store.push(&mut storage, &8)?; + + assert!(deque_store.remove(&mut storage, 8).is_err()); + assert!(deque_store.remove(&mut storage, 9).is_err()); + + assert_eq!(deque_store.remove(&mut storage, 7), Ok(8)); + assert_eq!(deque_store.get_at(&storage, 6), Ok(7)); + assert_eq!(deque_store.get_at(&storage, 5), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 4), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(4)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 6), 
Ok(7)); + assert_eq!(deque_store.get_at(&storage, 5), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 4), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(4)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 3), Ok(4)); + assert_eq!(deque_store.get_at(&storage, 4), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 2), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 1), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 1), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 0), Ok(1)); + + assert!(deque_store.remove(&mut storage, 0).is_err()); + Ok(()) + } + + #[test] + fn test_paging() -> StdResult<()> { + let mut storage = MockStorage::new(); + let append_store: AppendStore = AppendStore::new(b"test"); + + let page_size: u32 = 5; + let total_items: u32 = 50; + + for i in 0..total_items { + append_store.push(&mut storage, &i)?; + } + + for i in 0..((total_items / page_size) - 1) { + let start_page = i; + + let values = append_store.paging(&storage, start_page, page_size)?; 
+ + for (index, value) in values.iter().enumerate() { + assert_eq!(value, &(page_size * start_page + index as u32)) + } + } + + Ok(()) + } } diff --git a/packages/storage/src/deque_store.rs b/packages/storage/src/deque_store.rs index d107fc1..f9c7a08 100644 --- a/packages/storage/src/deque_store.rs +++ b/packages/storage/src/deque_store.rs @@ -4,7 +4,10 @@ //! This is achieved by storing each item in a separate storage entry. //! A special key is reserved for storing the length of the collection so far. //! Another special key is reserved for storing the offset of the collection. -use std::{convert::TryInto, marker::PhantomData}; +use std::any::type_name; +use std::convert::TryInto; +use std::marker::PhantomData; +use std::sync::Mutex; use serde::{de::DeserializeOwned, Serialize}; @@ -14,198 +17,194 @@ use secret_toolkit_serialization::{Bincode2, Serde}; const LEN_KEY: &[u8] = b"len"; const OFFSET_KEY: &[u8] = b"off"; -// Mutable deque_store -/// A type allowing both reads from and writes to the deque store at a given storage location. -pub struct DequeStoreMut<'a, T, Ser = Bincode2> +pub struct DequeStore<'a, T, Ser = Bincode2> where T: Serialize + DeserializeOwned, Ser: Serde, { - storage: &'a mut dyn Storage, - item_type: PhantomData<*const T>, - serialization_type: PhantomData<*const Ser>, - len: u32, - off: u32, + /// prefix of the newly constructed Storage + namespace: &'a [u8], + /// needed if any suffixes were added to the original namespace. + /// therefore it is not necessarily same as the namespace. + prefix: Option>, + length: Mutex>, + offset: Mutex>, + item_type: PhantomData, + serialization_type: PhantomData, } -impl<'a, T> DequeStoreMut<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - /// Try to use the provided storage as an DequeStore. If it doesn't seem to be one, then - /// initialize it as one. - /// - /// Returns Err if the contents of the storage can not be parsed. 
- pub fn attach_or_create(storage: &'a mut dyn Storage) -> StdResult { - DequeStoreMut::attach_or_create_with_serialization(storage, Bincode2) +impl<'a, 'b, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { + /// constructor + pub const fn new(prefix: &'a [u8]) -> Self { + Self { + namespace: prefix, + prefix: None, + length: Mutex::new(None), + offset: Mutex::new(None), + item_type: PhantomData, + serialization_type: PhantomData, + } } - - /// Try to use the provided storage as an DequeStore. - /// - /// Returns None if the provided storage doesn't seem like an DequeStore. - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach(storage: &'a mut dyn Storage) -> Option> { - DequeStoreMut::attach_with_serialization(storage, Bincode2) + /// This is used to produce a new DequeStorage. This can be used when you want to associate an AppendListStorage to each user + /// and you still get to define the DequeStorage as a static constant + pub fn add_suffix(&self, suffix: &[u8]) -> Self { + let prefix = if let Some(prefix) = &self.prefix { + [prefix.clone(), suffix.to_vec()].concat() + } else { + [self.namespace.to_vec(), suffix.to_vec()].concat() + }; + Self { + namespace: self.namespace, + prefix: Some(prefix), + length: Mutex::new(None), + offset: Mutex::new(None), + item_type: self.item_type, + serialization_type: self.serialization_type, + } } } -impl<'a, T, Ser> DequeStoreMut<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - /// Try to use the provided storage as an DequeStore. If it doesn't seem to be one, then - /// initialize it as one. This method allows choosing the serialization format you want to use. - /// - /// Returns Err if the contents of the storage can not be parsed. 
- pub fn attach_or_create_with_serialization( - storage: &'a mut dyn Storage, - _ser: Ser, - ) -> StdResult { - if let (Some(len_vec), Some(off_vec)) = (storage.get(LEN_KEY), (storage.get(OFFSET_KEY))) { - Self::new(storage, &len_vec, &off_vec) - } else { - let len_vec = 0_u32.to_be_bytes(); - storage.set(LEN_KEY, &len_vec); - let off_vec = 0_u32.to_be_bytes(); - storage.set(OFFSET_KEY, &off_vec); - Self::new(storage, &len_vec, &off_vec) +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { + /// gets the length from storage, and otherwise sets it to 0 + pub fn get_len(&self, storage: &S) -> StdResult { + let mut may_len = self.length.lock().unwrap(); + match *may_len { + Some(len) => Ok(len), + None => match self._get_u32(storage, LEN_KEY) { + Ok(len) => { + *may_len = Some(len); + Ok(len) + } + Err(e) => Err(e), + }, } } - - /// Try to use the provided storage as an DequeStore. - /// This method allows choosing the serialization format you want to use. - /// - /// Returns None if the provided storage doesn't seem like an DequeStore. - /// Returns Err if the contents of the storage can not be parsed. 
- pub fn attach_with_serialization( - storage: &'a mut dyn Storage, - _ser: Ser, - ) -> Option> { - let len_vec = storage.get(LEN_KEY)?; - let off_vec = storage.get(OFFSET_KEY)?; - Some(Self::new(storage, &len_vec, &off_vec)) - } - - fn new(storage: &'a mut dyn Storage, len_vec: &[u8], off_vec: &[u8]) -> StdResult { - let len_array = len_vec - .try_into() - .map_err(|err| StdError::parse_err("u32", err))?; - let len = u32::from_be_bytes(len_array); - let off_array = off_vec - .try_into() - .map_err(|err| StdError::parse_err("u32", err))?; - let off = u32::from_be_bytes(off_array); - - Ok(Self { - storage, - item_type: PhantomData, - serialization_type: PhantomData, - len, - off, - }) + /// gets the offset from storage, and otherwise sets it to 0 + pub fn get_off(&self, storage: &S) -> StdResult { + let mut may_off = self.offset.lock().unwrap(); + match *may_off { + Some(len) => Ok(len), + None => match self._get_u32(storage, OFFSET_KEY) { + Ok(len) => { + *may_off = Some(len); + Ok(len) + } + Err(e) => Err(e), + }, + } } - - pub fn len(&self) -> u32 { - self.len + /// gets offset or length + fn _get_u32(&self, storage: &S, key: &[u8]) -> StdResult { + let num_key = [self.as_slice(), key].concat(); + if let Some(num_vec) = storage.get(&num_key) { + let num_bytes = num_vec + .as_slice() + .try_into() + .map_err(|err| StdError::parse_err("u32", err))?; + let num = u32::from_be_bytes(num_bytes); + Ok(num) + } else { + Ok(0) + } } - - pub fn is_empty(&self) -> bool { - self.len == 0 + /// checks if the collection has any elements + pub fn is_empty(&self, storage: &S) -> StdResult { + Ok(self.get_len(storage)? 
== 0) } - - pub fn storage(&mut self) -> &mut dyn Storage { - self.storage + /// gets the element at pos if within bounds + pub fn get_at(&self, storage: &S, pos: u32) -> StdResult { + let len = self.get_len(storage)?; + if pos >= len { + return Err(StdError::generic_err("DequeStore access out of bounds")); + } + self.get_at_unchecked(storage, pos) } - - pub fn readonly_storage(&self) -> &dyn Storage { - self.storage + /// tries to get the element at pos + fn get_at_unchecked(&self, storage: &S, pos: u32) -> StdResult { + self.load_impl(storage, &self._get_offset_pos(storage, pos)?.to_be_bytes()) } - - /// Return an iterator over the items in the collection - pub fn iter(&self) -> Iter { - self.as_readonly().iter() + /// add the offset to the pos + fn _get_offset_pos(&self, storage: &S, pos: u32) -> StdResult { + let off = self.get_off(storage)?; + Ok(pos.overflowing_add(off).0) } - - /// Get the value stored at a given position. - /// - /// # Errors - /// Will return an error if pos is out of bounds or if an item is not found. - pub fn get_at(&self, pos: u32) -> StdResult { - self.as_readonly().get_at(pos) + /// Set the length of the collection + fn set_len(&self, storage: &mut S, len: u32) { + let mut may_len = self.length.lock().unwrap(); + *may_len = Some(len); + self._set_u32(storage, LEN_KEY, len) } - - fn get_at_unchecked(&self, pos: u32) -> StdResult { - self.as_readonly().get_at_unchecked(pos) + /// Set the offset of the collection + fn set_off(&self, storage: &mut S, off: u32) { + let mut may_off = self.offset.lock().unwrap(); + *may_off = Some(off); + self._set_u32(storage, OFFSET_KEY, off) } - - /// Set the value of the item stored at a given position. 
- /// - /// # Errors - /// Will return an error if the position is out of bounds - pub fn set_at(&mut self, pos: u32, item: &T) -> StdResult<()> { - if pos >= self.len { - return Err(StdError::generic_err("DequeStorage access out of bounds")); - } - self.set_at_unchecked(pos, item) + /// Set the length or offset of the collection + fn _set_u32(&self, storage: &mut S, key: &[u8], num: u32) { + let num_key = [self.as_slice(), key].concat(); + storage.set(&num_key, &num.to_be_bytes()); } - - fn set_at_unchecked(&mut self, pos: u32, item: &T) -> StdResult<()> { - let serialized = Ser::serialize(item)?; - self.storage.set( - &(pos.overflowing_add(self.off).0).to_be_bytes(), - &serialized, - ); - Ok(()) + /// Clear the collection + pub fn clear(&self, storage: &mut S) { + self.set_len(storage, 0); + self.set_off(storage, 0); + } + /// Replaces data at a position within bounds + pub fn set_at(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { + let len = self.get_len(storage)?; + if pos >= len { + return Err(StdError::generic_err("DequeStore access out of bounds")); + } + self.set_at_unchecked(storage, pos, item) } - - /// Append an item to the end of the collection. - /// - /// This operation has a constant cost. - pub fn push_back(&mut self, item: &T) -> StdResult<()> { - self.set_at_unchecked(self.len, item)?; - self.set_length(self.len + 1); + /// Sets data at a given index + fn set_at_unchecked(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { + self.save_impl( + storage, + &self._get_offset_pos(storage, pos)?.to_be_bytes(), + item, + ) + } + /// Pushes an item to the back + pub fn push_back(&self, storage: &mut S, item: &T) -> StdResult<()> { + let len = self.get_len(storage)?; + self.set_at_unchecked(storage, len, item)?; + self.set_len(storage, len + 1); Ok(()) } - - /// Add an item to the begining of the collection. - /// - /// This operation has a constant cost. 
- pub fn push_front(&mut self, item: &T) -> StdResult<()> { - self.set_offset(self.off.overflowing_sub(1).0); - self.set_at_unchecked(0, item)?; - self.set_length(self.len + 1); + /// Pushes an item to the front + pub fn push_front(&self, storage: &mut S, item: &T) -> StdResult<()> { + let off = self.get_off(storage)?; + let len = self.get_len(storage)?; + self.set_off(storage, off.overflowing_sub(1).0); + self.set_at_unchecked(storage, 0, item)?; + self.set_len(storage, len + 1); Ok(()) } - - /// Pop the last item off the collection - /// - /// This operation has a constant cost. - pub fn pop_back(&mut self) -> StdResult { - if let Some(len) = self.len.checked_sub(1) { - let item = self.get_at_unchecked(len); - self.set_length(len); + /// Pops an item from the back + pub fn pop_back(&self, storage: &mut S) -> StdResult { + if let Some(len) = self.get_len(storage)?.checked_sub(1) { + let item = self.get_at_unchecked(storage, len); + self.set_len(storage, len); item } else { Err(StdError::generic_err("Can not pop from empty DequeStore")) } } - - /// Pop the first item off the collection - /// - /// This operation has a constant cost. - pub fn pop_front(&mut self) -> StdResult { - if let Some(len) = self.len.checked_sub(1) { - let item = self.get_at_unchecked(0); - self.set_length(len); - self.set_offset(self.off.overflowing_add(1).0); + /// Pops an item from the front + pub fn pop_front(&self, storage: &mut S) -> StdResult { + if let Some(len) = self.get_len(storage)?.checked_sub(1) { + let off = self.get_off(storage)?; + let item = self.get_at_unchecked(storage, 0); + self.set_len(storage, len); + self.set_off(storage, off.overflowing_add(1).0); item } else { Err(StdError::generic_err("Can not pop from empty DequeStore")) } } - /// Remove an element from the collection at the specified position. /// /// Removing an element from the head (first) or tail (last) has a constant cost. 
@@ -215,225 +214,137 @@ where /// /// Removing an element from the middle of the collection /// has the worst runtime and gas cost. - pub fn remove(&mut self, pos: u32) -> StdResult { - if pos >= self.len { + pub fn remove(&self, storage: &mut S, pos: u32) -> StdResult { + let off = self.get_off(storage)?; + let len = self.get_len(storage)?; + if pos >= len { return Err(StdError::generic_err("DequeStorage access out of bounds")); } - let item = self.get_at_unchecked(pos); - let to_tail = self.len - pos; + let item = self.get_at_unchecked(storage, pos); + let to_tail = len - pos; if to_tail < pos { // closer to the tail - for i in pos..self.len - 1 { - let element_to_shift = self.get_at_unchecked(i + 1)?; - self.set_at_unchecked(i, &element_to_shift)?; + for i in pos..(len - 1) { + let element_to_shift = self.get_at_unchecked(storage, i + 1)?; + self.set_at_unchecked(storage, i, &element_to_shift)?; } } else { // closer to the head for i in (0..pos).rev() { - let element_to_shift = self.get_at_unchecked(i)?; - self.set_at_unchecked(i + 1, &element_to_shift)?; + let element_to_shift = self.get_at_unchecked(storage, i)?; + self.set_at_unchecked(storage, i + 1, &element_to_shift)?; } - self.set_offset(self.off.overflowing_add(1).0); + self.set_off(storage, off.overflowing_add(1).0); } - self.set_length(self.len - 1); + self.set_len(storage, len - 1); item } - - /// Clear the collection - pub fn clear(&mut self) { - self.set_length(0); - self.set_offset(0); - } - - /// Set the length of the collection - fn set_length(&mut self, len: u32) { - self.storage.set(LEN_KEY, &len.to_be_bytes()); - self.len = len; + /// Returns a readonly iterator + pub fn iter(&self, storage: &'a S) -> StdResult> { + let len = self.get_len(storage)?; + let iter = DequeStoreIter::new(self, storage, 0, len); + Ok(iter) } - - /// Set the offset of the collection - fn set_offset(&mut self, off: u32) { - self.storage.set(OFFSET_KEY, &off.to_be_bytes()); - self.off = off; + /// does paging with the 
given parameters + pub fn paging(&self, storage: &S, start_page: u32, size: u32) -> StdResult> { + self.iter(storage)? + .skip((start_page as usize) * (size as usize)) + .take(size as usize) + .collect() } +} - /// Gain access to the implementation of the immutable methods - fn as_readonly(&self) -> DequeStore { - DequeStore { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - len: self.len, - off: self.off, +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { + fn as_slice(&self) -> &[u8] { + if let Some(prefix) = &self.prefix { + prefix + } else { + self.namespace } } -} -// Readonly deque-store - -/// A type allowing only reads from an deque store. useful in the context_, u8 of queries. -pub struct DequeStore<'a, T, Ser = Bincode2> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: &'a dyn Storage, - item_type: PhantomData<*const T>, - serialization_type: PhantomData<*const Ser>, - len: u32, - off: u32, -} - -impl<'a, T> DequeStore<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - /// Try to use the provided storage as an DequeStore. + /// Returns StdResult from retrieving the item with the specified key. Returns a + /// StdError::NotFound if there is no item with that key /// - /// Returns None if the provided storage doesn't seem like an DequeStore. - /// Returns Err if the contents of the storage can not be parsed. - pub fn attach(storage: &'a dyn Storage) -> Option> { - DequeStore::attach_with_serialization(storage, Bincode2) - } -} - -impl<'a, T, Ser> DequeStore<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - /// Try to use the provided storage as an DequeStore. - /// This method allows choosing the serialization format you want to use. + /// # Arguments /// - /// Returns None if the provided storage doesn't seem like an DequeStore. - /// Returns Err if the contents of the storage can not be parsed. 
- pub fn attach_with_serialization( - storage: &'a dyn Storage, - _ser: Ser, - ) -> Option> { - let len_vec = storage.get(LEN_KEY)?; - let off_vec = storage.get(OFFSET_KEY)?; - Some(DequeStore::new(storage, len_vec, off_vec)) - } - - fn new(storage: &'a dyn Storage, len_vec: Vec, off_vec: Vec) -> StdResult { - let len_array = len_vec - .as_slice() - .try_into() - .map_err(|err| StdError::parse_err("u32", err))?; - let len = u32::from_be_bytes(len_array); - let off_array = off_vec - .as_slice() - .try_into() - .map_err(|err| StdError::parse_err("u32", err))?; - let off = u32::from_be_bytes(off_array); - - Ok(Self { - storage, - item_type: PhantomData, - serialization_type: PhantomData, - len, - off, - }) - } - - pub fn len(&self) -> u32 { - self.len - } - - pub fn is_empty(&self) -> bool { - self.len == 0 - } - - pub fn readonly_storage(&self) -> &dyn Storage { - self.storage - } - - /// Return an iterator over the items in the collection - pub fn iter(&self) -> Iter<'a, T, Ser> { - Iter { - storage: DequeStore::clone(self), - start: 0_u32, - end: self.len, - } - } - - /// Get the value stored at a given position. + /// * `storage` - a reference to the storage this item is in + /// * `key` - a byte slice representing the key to access the stored item + fn load_impl(&self, storage: &S, key: &[u8]) -> StdResult { + let prefixed_key = [self.as_slice(), key].concat(); + Ser::deserialize( + &storage + .get(&prefixed_key) + .ok_or(StdError::not_found(type_name::()))?, + ) + } + + /// Returns StdResult<()> resulting from saving an item to storage /// - /// # Errors - /// Will return an error if pos is out of bounds or if an item is not found. 
- pub fn get_at(&self, pos: u32) -> StdResult { - if pos >= self.len { - return Err(StdError::generic_err("DequeStorage access out of bounds")); - } - self.get_at_unchecked(pos) + /// # Arguments + /// + /// * `storage` - a mutable reference to the storage this item should go to + /// * `key` - a byte slice representing the key to access the stored item + /// * `value` - a reference to the item to store + fn save_impl(&self, storage: &mut S, key: &[u8], value: &T) -> StdResult<()> { + let prefixed_key = [self.as_slice(), key].concat(); + storage.set(&prefixed_key, &Ser::serialize(value)?); + Ok(()) } +} - fn get_at_unchecked(&self, pos: u32) -> StdResult { - let serialized = self - .storage - .get(&(pos.overflowing_add(self.off).0).to_be_bytes()) - .ok_or_else(|| { - StdError::generic_err(format!( - "No item in DequeStorage at position {}", - pos.overflowing_add(self.off).0 - )) - })?; - Ser::deserialize(&serialized) +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for DequeStore<'a, T, Ser> { + fn clone(&self) -> Self { + Self { + namespace: self.namespace.clone(), + prefix: self.prefix.clone(), + length: Mutex::new(None), + offset: Mutex::new(None), + item_type: self.item_type.clone(), + serialization_type: self.serialization_type.clone(), + } } } -impl<'a, T, Ser> IntoIterator for DequeStore<'a, T, Ser> +/// An iterator over the contents of the deque store. +pub struct DequeStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { - type Item = StdResult; - type IntoIter = Iter<'a, T, Ser>; - - fn into_iter(self) -> Iter<'a, T, Ser> { - let end = self.len; - Iter { - storage: self, - start: 0_u32, - end, - } - } + deque_store: &'a DequeStore<'a, T, Ser>, + storage: &'a S, + start: u32, + end: u32, } -// Manual `Clone` implementation because the default one tries to clone the Storage?? 
-impl<'a, T, Ser> Clone for DequeStore<'a, T, Ser> +impl<'a, T, S, Ser> DequeStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { - fn clone(&self) -> Self { + /// constructor + pub fn new( + deque_store: &'a DequeStore<'a, T, Ser>, + storage: &'a S, + start: u32, + end: u32, + ) -> Self { Self { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - len: self.len, - off: self.off, + deque_store, + storage, + start, + end, } } } -// Owning iterator - -/// An iterator over the contents of the deque store. -pub struct Iter<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: DequeStore<'a, T, Ser>, - start: u32, - end: u32, -} - -impl<'a, T, Ser> Iterator for Iter<'a, T, Ser> +impl<'a, T, S, Ser> Iterator for DequeStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { type Item = StdResult; @@ -442,7 +353,7 @@ where if self.start >= self.end { return None; } - let item = self.storage.get_at(self.start); + let item = self.deque_store.get_at(self.storage, self.start); self.start += 1; Some(item) } @@ -465,9 +376,10 @@ where } } -impl<'a, T, Ser> DoubleEndedIterator for Iter<'a, T, Ser> +impl<'a, T, S, Ser> DoubleEndedIterator for DequeStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { fn next_back(&mut self) -> Option { @@ -475,7 +387,7 @@ where return None; } self.end -= 1; - let item = self.storage.get_at(self.end); + let item = self.deque_store.get_at(self.storage, self.end); Some(item) } @@ -492,9 +404,10 @@ where } // This enables writing `deque_store.iter().skip(n).rev()` -impl<'a, T, Ser> ExactSizeIterator for Iter<'a, T, Ser> +impl<'a, T, S, Ser> ExactSizeIterator for DequeStoreIter<'a, T, S, Ser> where T: Serialize + DeserializeOwned, + S: Storage, Ser: Serde, { } @@ -510,114 +423,114 @@ mod tests { #[test] fn test_pushs_pops() -> StdResult<()> { let mut storage = 
MockStorage::new(); - let mut deque_store = DequeStoreMut::attach_or_create(&mut storage)?; - deque_store.push_front(&4)?; - deque_store.push_back(&5)?; - deque_store.push_front(&3)?; - deque_store.push_back(&6)?; - deque_store.push_front(&2)?; - deque_store.push_back(&7)?; - deque_store.push_front(&1)?; - deque_store.push_back(&8)?; - - assert_eq!(deque_store.pop_front(), Ok(1)); - assert_eq!(deque_store.pop_back(), Ok(8)); - assert_eq!(deque_store.pop_front(), Ok(2)); - assert_eq!(deque_store.pop_back(), Ok(7)); - assert_eq!(deque_store.pop_front(), Ok(3)); - assert_eq!(deque_store.pop_back(), Ok(6)); - assert_eq!(deque_store.pop_front(), Ok(4)); - assert_eq!(deque_store.pop_back(), Ok(5)); - assert!(deque_store.pop_back().is_err()); + let deque_store: DequeStore = DequeStore::new(b"test"); + deque_store.push_front(&mut storage, &4)?; + deque_store.push_back(&mut storage, &5)?; + deque_store.push_front(&mut storage, &3)?; + deque_store.push_back(&mut storage, &6)?; + deque_store.push_front(&mut storage, &2)?; + deque_store.push_back(&mut storage, &7)?; + deque_store.push_front(&mut storage, &1)?; + deque_store.push_back(&mut storage, &8)?; + + assert_eq!(deque_store.pop_front(&mut storage), Ok(1)); + assert_eq!(deque_store.pop_back(&mut storage), Ok(8)); + assert_eq!(deque_store.pop_front(&mut storage), Ok(2)); + assert_eq!(deque_store.pop_back(&mut storage), Ok(7)); + assert_eq!(deque_store.pop_front(&mut storage), Ok(3)); + assert_eq!(deque_store.pop_back(&mut storage), Ok(6)); + assert_eq!(deque_store.pop_front(&mut storage), Ok(4)); + assert_eq!(deque_store.pop_back(&mut storage), Ok(5)); + assert!(deque_store.pop_back(&mut storage).is_err()); Ok(()) } #[test] fn test_removes() -> StdResult<()> { let mut storage = MockStorage::new(); - let mut deque_store = DequeStoreMut::attach_or_create(&mut storage)?; - deque_store.push_front(&2)?; - deque_store.push_back(&3)?; - deque_store.push_back(&4)?; - deque_store.push_back(&5)?; - deque_store.push_back(&6)?; - 
deque_store.push_front(&1)?; - deque_store.push_back(&7)?; - deque_store.push_back(&8)?; - - assert!(deque_store.remove(8).is_err()); - assert!(deque_store.remove(9).is_err()); - - assert_eq!(deque_store.remove(7), Ok(8)); - assert_eq!(deque_store.get_at(6), Ok(7)); - assert_eq!(deque_store.get_at(5), Ok(6)); - assert_eq!(deque_store.get_at(4), Ok(5)); - assert_eq!(deque_store.get_at(3), Ok(4)); - assert_eq!(deque_store.get_at(2), Ok(3)); - assert_eq!(deque_store.get_at(1), Ok(2)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(6), Ok(7)); - assert_eq!(deque_store.get_at(5), Ok(6)); - assert_eq!(deque_store.get_at(4), Ok(5)); - assert_eq!(deque_store.get_at(3), Ok(4)); - assert_eq!(deque_store.get_at(2), Ok(3)); - assert_eq!(deque_store.get_at(1), Ok(2)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(3), Ok(4)); - assert_eq!(deque_store.get_at(4), Ok(6)); - assert_eq!(deque_store.get_at(3), Ok(5)); - assert_eq!(deque_store.get_at(2), Ok(3)); - assert_eq!(deque_store.get_at(1), Ok(2)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(1), Ok(2)); - assert_eq!(deque_store.get_at(3), Ok(6)); - assert_eq!(deque_store.get_at(2), Ok(5)); - assert_eq!(deque_store.get_at(1), Ok(3)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(2), Ok(5)); - assert_eq!(deque_store.get_at(2), Ok(6)); - assert_eq!(deque_store.get_at(1), Ok(3)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(1), Ok(3)); - assert_eq!(deque_store.get_at(1), Ok(6)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(1), Ok(6)); - assert_eq!(deque_store.get_at(0), Ok(1)); - - assert_eq!(deque_store.remove(0), Ok(1)); - - assert!(deque_store.remove(0).is_err()); + let deque_store: DequeStore = DequeStore::new(b"test"); + deque_store.push_front(&mut storage, &2)?; + deque_store.push_back(&mut storage, &3)?; + deque_store.push_back(&mut 
storage, &4)?; + deque_store.push_back(&mut storage, &5)?; + deque_store.push_back(&mut storage, &6)?; + deque_store.push_front(&mut storage, &1)?; + deque_store.push_back(&mut storage, &7)?; + deque_store.push_back(&mut storage, &8)?; + + assert!(deque_store.remove(&mut storage, 8).is_err()); + assert!(deque_store.remove(&mut storage, 9).is_err()); + + assert_eq!(deque_store.remove(&mut storage, 7), Ok(8)); + assert_eq!(deque_store.get_at(&storage, 6), Ok(7)); + assert_eq!(deque_store.get_at(&storage, 5), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 4), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(4)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 6), Ok(7)); + assert_eq!(deque_store.get_at(&storage, 5), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 4), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(4)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 3), Ok(4)); + assert_eq!(deque_store.get_at(&storage, 4), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 1), Ok(2)); + assert_eq!(deque_store.get_at(&storage, 3), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 2), Ok(5)); + assert_eq!(deque_store.get_at(&storage, 2), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 0), 
Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 1), Ok(3)); + assert_eq!(deque_store.get_at(&storage, 1), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 1), Ok(6)); + assert_eq!(deque_store.get_at(&storage, 0), Ok(1)); + + assert_eq!(deque_store.remove(&mut storage, 0), Ok(1)); + + assert!(deque_store.remove(&mut storage, 0).is_err()); Ok(()) } #[test] fn test_iterator() -> StdResult<()> { let mut storage = MockStorage::new(); - let mut deque_store = DequeStoreMut::attach_or_create(&mut storage)?; + let deque_store: DequeStore = DequeStore::new(b"test"); - deque_store.push_front(&2143)?; - deque_store.push_back(&3333)?; - deque_store.push_back(&3412)?; - deque_store.push_front(&1234)?; - deque_store.push_back(&4321)?; + deque_store.push_front(&mut storage, &2143)?; + deque_store.push_back(&mut storage, &3333)?; + deque_store.push_back(&mut storage, &3412)?; + deque_store.push_front(&mut storage, &1234)?; + deque_store.push_back(&mut storage, &4321)?; - deque_store.remove(2)?; + deque_store.remove(&mut storage, 2)?; // iterate twice to make sure nothing changed - let mut iter = deque_store.iter(); + let mut iter = deque_store.iter(&storage)?; assert_eq!(iter.next(), Some(Ok(1234))); assert_eq!(iter.next(), Some(Ok(2143))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), None); - let mut iter = deque_store.iter(); + let mut iter = deque_store.iter(&storage)?; assert_eq!(iter.next(), Some(Ok(1234))); assert_eq!(iter.next(), Some(Ok(2143))); assert_eq!(iter.next(), Some(Ok(3412))); @@ -625,7 +538,7 @@ mod tests { assert_eq!(iter.next(), None); // make sure our implementation of `nth` doesn't break anything - let mut iter = deque_store.iter().skip(2); + let mut iter = deque_store.iter(&storage)?.skip(2); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), None); @@ -636,16 +549,16 @@ mod 
tests { #[test] fn test_reverse_iterator() -> StdResult<()> { let mut storage = MockStorage::new(); - let mut deque_store = DequeStoreMut::attach_or_create(&mut storage)?; - deque_store.push_front(&2143)?; - deque_store.push_back(&3412)?; - deque_store.push_back(&3333)?; - deque_store.push_front(&1234)?; - deque_store.push_back(&4321)?; + let deque_store: DequeStore = DequeStore::new(b"test"); + deque_store.push_front(&mut storage, &2143)?; + deque_store.push_back(&mut storage, &3412)?; + deque_store.push_back(&mut storage, &3333)?; + deque_store.push_front(&mut storage, &1234)?; + deque_store.push_back(&mut storage, &4321)?; - deque_store.remove(3)?; + deque_store.remove(&mut storage, 3)?; - let mut iter = deque_store.iter().rev(); + let mut iter = deque_store.iter(&storage)?.rev(); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(2143))); @@ -653,7 +566,7 @@ mod tests { assert_eq!(iter.next(), None); // iterate twice to make sure nothing changed - let mut iter = deque_store.iter().rev(); + let mut iter = deque_store.iter(&storage)?.rev(); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), Some(Ok(2143))); @@ -661,13 +574,13 @@ mod tests { assert_eq!(iter.next(), None); // make sure our implementation of `nth_back` doesn't break anything - let mut iter = deque_store.iter().rev().skip(2); + let mut iter = deque_store.iter(&storage)?.rev().skip(2); assert_eq!(iter.next(), Some(Ok(2143))); assert_eq!(iter.next(), Some(Ok(1234))); assert_eq!(iter.next(), None); // make sure our implementation of `ExactSizeIterator` works well - let mut iter = deque_store.iter().skip(2).rev(); + let mut iter = deque_store.iter(&storage)?.skip(2).rev(); assert_eq!(iter.next(), Some(Ok(4321))); assert_eq!(iter.next(), Some(Ok(3412))); assert_eq!(iter.next(), None); @@ -675,32 +588,53 @@ mod tests { Ok(()) } - #[test] - fn test_attach_to_wrong_location() { - let mut 
storage = MockStorage::new(); - assert!(DequeStore::::attach(&storage).is_none()); - assert!(DequeStoreMut::::attach(&mut storage).is_none()); - } - #[test] fn test_serializations() -> StdResult<()> { // Check the default behavior is Bincode2 let mut storage = MockStorage::new(); - let mut deque_store = DequeStoreMut::attach_or_create(&mut storage)?; - deque_store.push_back(&1234)?; + let deque_store: DequeStore = DequeStore::new(b"test"); + deque_store.push_back(&mut storage, &1234)?; - let bytes = deque_store.readonly_storage().get(&0_u32.to_be_bytes()); + let key = [deque_store.as_slice(), &0_u32.to_be_bytes()].concat(); + let bytes = storage.get(&key); assert_eq!(bytes, Some(vec![210, 4, 0, 0])); // Check that overriding the serializer with Json works let mut storage = MockStorage::new(); - let mut deque_store = - DequeStoreMut::attach_or_create_with_serialization(&mut storage, Json)?; - deque_store.push_back(&1234)?; - let bytes = deque_store.readonly_storage().get(&0_u32.to_be_bytes()); + let json_deque_store: DequeStore = DequeStore::new(b"test2"); + json_deque_store.push_back(&mut storage, &1234)?; + + let key = [json_deque_store.as_slice(), &0_u32.to_be_bytes()].concat(); + let bytes = storage.get(&key); assert_eq!(bytes, Some(b"1234".to_vec())); Ok(()) } + + #[test] + fn test_paging() -> StdResult<()> { + let mut storage = MockStorage::new(); + let append_store: DequeStore = DequeStore::new(b"test"); + + let page_size: u32 = 5; + let total_items: u32 = 50; + + for j in 0..total_items { + let i = total_items - j; + append_store.push_front(&mut storage, &i)?; + } + + for i in 0..((total_items / page_size) - 1) { + let start_page = i; + + let values = append_store.paging(&storage, start_page, page_size)?; + + for (index, value) in values.iter().enumerate() { + assert_eq!(value, &(page_size * start_page + index as u32 + 1)) + } + } + + Ok(()) + } } diff --git a/packages/storage/src/item.rs b/packages/storage/src/item.rs new file mode 100644 index 
0000000..8c909b1 --- /dev/null +++ b/packages/storage/src/item.rs @@ -0,0 +1,183 @@ +use std::any::type_name; + +use std::marker::PhantomData; + +use cosmwasm_std::{StdError, StdResult, Storage}; +use secret_toolkit_serialization::{Bincode2, Serde}; +use serde::{de::DeserializeOwned, Serialize}; + +/// This storage struct is based on Item from cosmwasm-storage-plus +pub struct Item<'a, T, Ser = Bincode2> +where + T: Serialize + DeserializeOwned, + Ser: Serde, +{ + storage_key: &'a [u8], + item_type: PhantomData, + serialization_type: PhantomData, +} + +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Item<'a, T, Ser> { + pub const fn new(key: &'a [u8]) -> Self { + Self { + storage_key: key, + item_type: PhantomData, + serialization_type: PhantomData, + } + } +} + +impl<'a, T, Ser> Item<'a, T, Ser> +where + T: Serialize + DeserializeOwned, + Ser: Serde, +{ + /// save will serialize the model and store, returns an error on serialization issues + pub fn save(&self, storage: &mut S, data: &T) -> StdResult<()> { + self.save_impl(storage, data) + } + + /// userfacing remove function + pub fn remove(&self, storage: &mut S) { + self.remove_impl(storage); + } + + /// load will return an error if no data is set at the given key, or on parse error + pub fn load(&self, storage: &S) -> StdResult { + self.load_impl(storage) + } + + /// may_load will parse the data stored at the key if present, returns `Ok(None)` if no data there. + /// returns an error on issues parsing + pub fn may_load(&self, storage: &S) -> StdResult> { + self.may_load_impl(storage) + } + + /// efficient way to see if any object is currently saved. + pub fn is_empty(&self, storage: &S) -> bool { + match storage.get(self.as_slice()) { + Some(_) => false, + None => true, + } + } + + /// Loads the data, perform the specified action, and store the result + /// in the database. This is shorthand for some common sequences, which may be useful. 
+ /// + /// It assumes, that data was initialized before, and if it doesn't exist, `Err(StdError::NotFound)` + /// is returned. + pub fn update(&self, storage: &mut S, action: A) -> StdResult + where + S: Storage, + A: FnOnce(T) -> StdResult, + { + let input = self.load_impl(storage)?; + let output = action(input)?; + self.save_impl(storage, &output)?; + Ok(output) + } + + /// Returns StdResult from retrieving the item with the specified key. Returns a + /// StdError::NotFound if there is no item with that key + /// + /// # Arguments + /// + /// * `storage` - a reference to the storage this item is in + fn load_impl(&self, storage: &S) -> StdResult { + Ser::deserialize( + &storage + .get(self.as_slice()) + .ok_or(StdError::not_found(type_name::()))?, + ) + } + + /// Returns StdResult> from retrieving the item with the specified key. Returns a + /// None if there is no item with that key + /// + /// # Arguments + /// + /// * `storage` - a reference to the storage this item is in + fn may_load_impl(&self, storage: &S) -> StdResult> { + match storage.get(self.as_slice()) { + Some(value) => Ser::deserialize(&value).map(Some), + None => Ok(None), + } + } + + /// Returns StdResult<()> resulting from saving an item to storage + /// + /// # Arguments + /// + /// * `storage` - a mutable reference to the storage this item should go to + /// * `value` - a reference to the item to store + fn save_impl(&self, storage: &mut S, value: &T) -> StdResult<()> { + storage.set(self.as_slice(), &Ser::serialize(value)?); + Ok(()) + } + + /// Removes an item from storage + /// + /// # Arguments + /// + /// * `storage` - a mutable reference to the storage this item is in + fn remove_impl(&self, storage: &mut S) { + storage.remove(self.as_slice()); + } + + fn as_slice(&self) -> &[u8] { + self.storage_key + } +} + +#[cfg(test)] +mod tests { + use cosmwasm_std::testing::MockStorage; + + use secret_toolkit_serialization::Json; + + use super::*; + + #[test] + fn test_item() -> StdResult<()> { + 
let mut storage = MockStorage::new(); + let item: Item = Item::new(b"test"); + + assert!(item.is_empty(&storage)); + assert_eq!(item.may_load(&storage)?, None); + assert!(item.load(&storage).is_err()); + item.save(&mut storage, &6)?; + assert!(!item.is_empty(&storage)); + assert_eq!(item.load(&storage)?, 6); + assert_eq!(item.may_load(&storage)?, Some(6)); + item.remove(&mut storage); + assert!(item.is_empty(&storage)); + assert_eq!(item.may_load(&storage)?, None); + assert!(item.load(&storage).is_err()); + + Ok(()) + } + + #[test] + fn test_serializations() -> StdResult<()> { + // Check the default behavior is Bincode2 + let mut storage = MockStorage::new(); + + let item: Item = Item::new(b"test"); + item.save(&mut storage, &1234)?; + + let key = b"test"; + let bytes = storage.get(key); + assert_eq!(bytes, Some(vec![210, 4, 0, 0])); + + // Check that overriding the serializer with Json works + let mut storage = MockStorage::new(); + let json_item: Item = Item::new(b"test2"); + json_item.save(&mut storage, &1234)?; + + let key = b"test2"; + let bytes = storage.get(key); + assert_eq!(bytes, Some(b"1234".to_vec())); + + Ok(()) + } +} diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs new file mode 100644 index 0000000..baeaa9e --- /dev/null +++ b/packages/storage/src/keymap.rs @@ -0,0 +1,1432 @@ +use std::any::type_name; +use std::convert::TryInto; +use std::marker::PhantomData; +use std::sync::Mutex; + +use serde::Deserialize; +use serde::{de::DeserializeOwned, Serialize}; + +use cosmwasm_std::{StdError, StdResult, Storage}; + +use secret_toolkit_serialization::{Bincode2, Serde}; + +const INDEXES: &[u8] = b"indexes"; +const MAP_LENGTH: &[u8] = b"length"; + +const PAGE_SIZE: u32 = 5; + +fn _page_from_position(position: u32) -> u32 { + position / PAGE_SIZE +} + +#[derive(Serialize, Deserialize)] +struct InternalItem +where + T: Serialize + DeserializeOwned, + Ser: Serde, +{ + item: Vec, + index_pos: u32, + item_type: PhantomData, + 
serialization_type: PhantomData, +} + +impl InternalItem { + fn new(index_pos: u32, item: &T) -> StdResult { + Ok(Self { + item: Ser::serialize(item)?, + index_pos, + item_type: PhantomData, + serialization_type: PhantomData, + }) + } + fn get_item(&self) -> StdResult { + Ser::deserialize(&self.item) + } +} + +pub struct Keymap<'a, K, T, Ser = Bincode2> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + Ser: Serde, +{ + /// prefix of the newly constructed Storage + namespace: &'a [u8], + /// needed if any suffixes were added to the original namespace. + /// therefore it is not necessarily same as the namespace. + prefix: Option>, + length: Mutex>, + key_type: PhantomData, + item_type: PhantomData, + serialization_type: PhantomData, +} + +impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Serde> + Keymap<'a, K, T, Ser> +{ + /// constructor + pub const fn new(prefix: &'a [u8]) -> Self { + Self { + namespace: prefix, + prefix: None, + length: Mutex::new(None), + key_type: PhantomData, + item_type: PhantomData, + serialization_type: PhantomData, + } + } + /// This is used to produce a new Keymap. 
This can be used when you want to associate an Keymap to each user + /// and you still get to define the Keymap as a static constant + pub fn add_suffix(&self, suffix: &[u8]) -> Self { + let prefix = if let Some(prefix) = &self.prefix { + [prefix.clone(), suffix.to_vec()].concat() + } else { + [self.namespace.to_vec(), suffix.to_vec()].concat() + }; + Self { + namespace: self.namespace, + prefix: Some(prefix), + length: Mutex::new(None), + key_type: self.key_type, + item_type: self.item_type, + serialization_type: self.serialization_type, + } + } +} + +impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Serde> + Keymap<'a, K, T, Ser> +{ + /// Serialize key + fn serialize_key(&self, key: &K) -> StdResult> { + Ser::serialize(key) + } + /// Deserialize key + fn deserialize_key(&self, key_data: &[u8]) -> StdResult { + Ser::deserialize(key_data) + } + /// get total number of objects saved + pub fn get_len(&self, storage: &S) -> StdResult { + let mut may_len = self.length.lock().unwrap(); + match *may_len { + Some(length) => Ok(length), + None => { + let len_key = [self.as_slice(), MAP_LENGTH].concat(); + if let Some(len_vec) = storage.get(&len_key) { + let len_bytes = len_vec + .as_slice() + .try_into() + .map_err(|err| StdError::parse_err("u32", err))?; + let len = u32::from_be_bytes(len_bytes); + *may_len = Some(len); + Ok(len) + } else { + *may_len = Some(0); + Ok(0) + } + } + } + } + /// checks if the collection has any elements + pub fn is_empty(&self, storage: &S) -> StdResult { + Ok(self.get_len(storage)? 
== 0) + } + /// set length of the map + fn set_len(&self, storage: &mut S, len: u32) -> StdResult<()> { + let len_key = [self.as_slice(), MAP_LENGTH].concat(); + storage.set(&len_key, &len.to_be_bytes()); + + let mut may_len = self.length.lock().unwrap(); + *may_len = Some(len); + + Ok(()) + } + /// Used to get the indexes stored in the given page number + fn _get_indexes(&self, storage: &S, page: u32) -> StdResult>> { + let indexes_key = [self.as_slice(), INDEXES, page.to_be_bytes().as_slice()].concat(); + let maybe_serialized = storage.get(&indexes_key); + match maybe_serialized { + Some(serialized) => Bincode2::deserialize(&serialized), + None => Ok(vec![]), + } + } + /// Set an indexes page + fn _set_indexes_page( + &self, + storage: &mut S, + page: u32, + indexes: &Vec>, + ) -> StdResult<()> { + let indexes_key = [self.as_slice(), INDEXES, page.to_be_bytes().as_slice()].concat(); + storage.set(&indexes_key, &Bincode2::serialize(indexes)?); + Ok(()) + } + /// user facing get function + pub fn get(&self, storage: &S, key: &K) -> Option { + if let Ok(internal_item) = self._get_from_key(storage, key) { + internal_item.get_item().ok() + } else { + None + } + } + /// internal item get function + fn _get_from_key(&self, storage: &S, key: &K) -> StdResult> { + let key_vec = self.serialize_key(key)?; + self.load_impl(storage, &key_vec) + } + /// user facing remove function + pub fn remove(&self, storage: &mut S, key: &K) -> StdResult<()> { + let key_vec = self.serialize_key(key)?; + let removed_pos = self._get_from_key(storage, key)?.index_pos; + + self.remove_impl(storage, &key_vec); + let page = _page_from_position(removed_pos); + + let mut len = self.get_len(storage)?; + len -= 1; + self.set_len(storage, len)?; + + let mut indexes = self._get_indexes(storage, page)?; + + let pos_in_indexes = (removed_pos % PAGE_SIZE) as usize; + + if indexes[pos_in_indexes] != key_vec { + return Err(StdError::generic_err( + "Tried to remove, but hash not found - should never 
happen", + )); + } + + // if our object is the last item, then just remove it + if len == 0 || len == removed_pos { + indexes.pop(); + self._set_indexes_page(storage, page, &indexes)?; + return Ok(()); + } + + // max page should use previous_len - 1 which is exactly the current len + let max_page = _page_from_position(len); + if max_page == page { + // last page indexes is the same as indexes + let last_key = indexes.pop().ok_or(StdError::generic_err( + "Last item's key not found - should never happen", + ))?; + // modify last item + let mut last_internal_item = self.load_impl(storage, &last_key)?; + last_internal_item.index_pos = removed_pos; + self.save_impl(storage, &last_key, &last_internal_item)?; + // save to indexes + indexes[pos_in_indexes] = last_key; + self._set_indexes_page(storage, page, &indexes)?; + } else { + let mut last_page_indexes = self._get_indexes(storage, max_page)?; + let last_key = last_page_indexes.pop().ok_or(StdError::generic_err( + "Last item's key not found - should never happen", + ))?; + // modify last item + let mut last_internal_item = self.load_impl(storage, &last_key)?; + last_internal_item.index_pos = removed_pos; + self.save_impl(storage, &last_key, &last_internal_item)?; + // save indexes + indexes[pos_in_indexes] = last_key; + self._set_indexes_page(storage, page, &indexes)?; + self._set_indexes_page(storage, max_page, &last_page_indexes)?; + } + + Ok(()) + } + /// user facing insert function + pub fn insert(&self, storage: &mut S, key: &K, item: &T) -> StdResult<()> { + let key_vec = self.serialize_key(key)?; + match self.may_load_impl(storage, &key_vec)? 
{ + Some(existing_internal_item) => { + // if item already exists + let new_internal_item = InternalItem::new(existing_internal_item.index_pos, item)?; + self.save_impl(storage, &key_vec, &new_internal_item) + } + None => { + // not already saved + let pos = self.get_len(storage)?; + self.set_len(storage, pos + 1)?; + let page = _page_from_position(pos); + // save the item + let internal_item = InternalItem::new(pos, item)?; + self.save_impl(storage, &key_vec, &internal_item)?; + // add index + let mut indexes = self._get_indexes(storage, page)?; + indexes.push(key_vec); + self._set_indexes_page(storage, page, &indexes) + } + } + } + /// user facing method that checks if any item is stored with this key. + pub fn contains(&self, storage: &S, key: &K) -> bool { + match self.serialize_key(key) { + Ok(key_vec) => self.contains_impl(storage, &key_vec), + Err(_) => false, + } + } + /// paginates (key, item) pairs. + pub fn paging( + &self, + storage: &S, + start_page: u32, + size: u32, + ) -> StdResult> { + let start_pos = start_page * size; + let mut end_pos = start_pos + size - 1; + + let max_size = self.get_len(storage)?; + + if max_size == 0 { + return Ok(vec![]); + } + + if start_pos > max_size { + return Err(StdError::NotFound { + kind: "Out of bounds".to_string(), + }); + } else if end_pos > max_size { + end_pos = max_size - 1; + } + self.get_pairs_at_positions(storage, start_pos, end_pos) + } + /// paginates only the keys. 
More efficient than paginating both items and keys + pub fn paging_keys( + &self, + storage: &S, + start_page: u32, + size: u32, + ) -> StdResult> { + let start_pos = start_page * size; + let mut end_pos = start_pos + size - 1; + + let max_size = self.get_len(storage)?; + + if max_size == 0 { + return Ok(vec![]); + } + + if start_pos > max_size { + return Err(StdError::NotFound { + kind: "Out of bounds".to_string(), + }); + } else if end_pos > max_size { + end_pos = max_size - 1; + } + self.get_keys_at_positions(storage, start_pos, end_pos) + } + /// tries to list keys without checking start/end bounds + fn get_keys_at_positions( + &self, + storage: &S, + start: u32, + end: u32, + ) -> StdResult> { + let start_page = _page_from_position(start); + let end_page = _page_from_position(end); + + let mut res = vec![]; + + for page in start_page..=end_page { + let indexes = self._get_indexes(storage, page)?; + let start_page_pos = if page == start_page { + start % PAGE_SIZE + } else { + 0 + }; + let end_page_pos = if page == end_page { + end % PAGE_SIZE + } else { + PAGE_SIZE - 1 + }; + for i in start_page_pos..=end_page_pos { + let key_vec = &indexes[i as usize]; + let key = self.deserialize_key(key_vec)?; + res.push(key); + } + } + Ok(res) + } + /// tries to list (key, item) pairs without checking start/end bounds + fn get_pairs_at_positions( + &self, + storage: &S, + start: u32, + end: u32, + ) -> StdResult> { + let start_page = _page_from_position(start); + let end_page = _page_from_position(end); + + let mut res = vec![]; + + for page in start_page..=end_page { + let indexes = self._get_indexes(storage, page)?; + let start_page_pos = if page == start_page { + start % PAGE_SIZE + } else { + 0 + }; + let end_page_pos = if page == end_page { + end % PAGE_SIZE + } else { + PAGE_SIZE - 1 + }; + for i in start_page_pos..=end_page_pos { + let key_vec = &indexes[i as usize]; + let key = self.deserialize_key(key_vec)?; + let item = self.load_impl(storage, 
key_vec)?.get_item()?; + res.push((key, item)); + } + } + Ok(res) + } + /// gets a key from a specific position in indexes + fn get_key_from_pos(&self, storage: &S, pos: u32) -> StdResult { + let page = _page_from_position(pos); + let indexes = self._get_indexes(storage, page)?; + let index = pos % PAGE_SIZE; + let key_vec = &indexes[index as usize]; + self.deserialize_key(key_vec) + } + /// gets a key from a specific position in indexes + fn get_pair_from_pos(&self, storage: &S, pos: u32) -> StdResult<(K, T)> { + let page = _page_from_position(pos); + let indexes = self._get_indexes(storage, page)?; + let index = pos % PAGE_SIZE; + let key_vec = &indexes[index as usize]; + let key = self.deserialize_key(key_vec)?; + let item = self.load_impl(storage, key_vec)?.get_item()?; + Ok((key, item)) + } + /// Returns a readonly iterator only for keys. More efficient than iter(). + pub fn iter_keys(&self, storage: &'a S) -> StdResult> { + let len = self.get_len(storage)?; + let iter = KeyIter::new(self, storage, 0, len); + Ok(iter) + } + /// Returns a readonly iterator for (key-item) pairs + pub fn iter(&self, storage: &'a S) -> StdResult> { + let len = self.get_len(storage)?; + let iter = KeyItemIter::new(self, storage, 0, len); + Ok(iter) + } +} + +impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Serde> + PrefixedTypedStorage, Ser> for Keymap<'a, K, T, Ser> +{ + fn as_slice(&self) -> &[u8] { + if let Some(prefix) = &self.prefix { + prefix + } else { + self.namespace + } + } +} + +impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Serde> Clone + for Keymap<'a, K, T, Ser> +{ + fn clone(&self) -> Self { + Self { + namespace: self.namespace.clone(), + prefix: self.prefix.clone(), + length: Mutex::new(None), + key_type: self.key_type.clone(), + item_type: self.item_type.clone(), + serialization_type: self.serialization_type.clone(), + } + } +} + +/// An iterator over the keys of the Keymap. 
+pub struct KeyIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + keymap: &'a Keymap<'a, K, T, Ser>, + storage: &'a S, + start: u32, + end: u32, + saved_indexes: Option>>, + saved_index_page: Option, + saved_back_indexes: Option>>, + saved_back_index_page: Option, +} + +impl<'a, K, T, S, Ser> KeyIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + /// constructor + pub fn new(keymap: &'a Keymap<'a, K, T, Ser>, storage: &'a S, start: u32, end: u32) -> Self { + Self { + keymap, + storage, + start, + end, + saved_indexes: None, + saved_index_page: None, + saved_back_indexes: None, + saved_back_index_page: None, + } + } +} + +impl<'a, K, T, S, Ser> Iterator for KeyIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + type Item = StdResult; + + fn next(&mut self) -> Option { + if self.start >= self.end { + return None; + } + let res: Option; + if let (Some(page), Some(indexes)) = (&self.saved_index_page, &self.saved_indexes) { + let current_page = _page_from_position(self.start); + if *page == current_page { + let current_idx = (self.start % PAGE_SIZE) as usize; + if current_idx + 1 > indexes.len() { + res = None; + } else { + let key_vec = &indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap._get_indexes(self.storage, current_page) { + Ok(new_indexes) => { + let current_idx = (self.start % PAGE_SIZE) as usize; + if current_idx + 1 > new_indexes.len() { + res = None; + } else { + let key_vec = &new_indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + self.saved_index_page = 
Some(current_page); + self.saved_indexes = Some(new_indexes); + } + Err(_) => match self.keymap.get_key_from_pos(self.storage, self.start) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(_) => { + res = None; + } + }, + } + } + } else { + let next_page = _page_from_position(self.start + 1); + let current_page = _page_from_position(self.start); + match self.keymap._get_indexes(self.storage, next_page) { + Ok(next_index) => { + if current_page == next_page { + let current_idx = (self.start % PAGE_SIZE) as usize; + if current_idx + 1 > next_index.len() { + res = None; + } else { + let key_vec = &next_index[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap.get_key_from_pos(self.storage, self.start) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(_) => { + res = None; + } + } + } + self.saved_index_page = Some(next_page); + self.saved_indexes = Some(next_index); + } + Err(_) => match self.keymap.get_key_from_pos(self.storage, self.start) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(_) => { + res = None; + } + }, + } + } + self.start += 1; + res + } + + // This needs to be implemented correctly for `ExactSizeIterator` to work. + fn size_hint(&self) -> (usize, Option) { + let len = (self.end - self.start) as usize; + (len, Some(len)) + } + + // I implement `nth` manually because it is used in the standard library whenever + // it wants to skip over elements, but the default implementation repeatedly calls next. + // because that is very expensive in this case, and the items are just discarded, we wan + // do better here. 
+ // In practice, this enables cheap paging over the storage by calling: + // `append_store.iter().skip(start).take(length).collect()` + fn nth(&mut self, n: usize) -> Option { + self.start = self.start.saturating_add(n as u32); + self.next() + } +} + +impl<'a, K, T, S, Ser> DoubleEndedIterator for KeyIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + fn next_back(&mut self) -> Option { + if self.start >= self.end { + return None; + } + self.end -= 1; + let res; + if let (Some(page), Some(indexes)) = (&self.saved_back_index_page, &self.saved_back_indexes) + { + let current_page = _page_from_position(self.end); + if *page == current_page { + let current_idx = (self.end % PAGE_SIZE) as usize; + if current_idx + 1 > indexes.len() { + res = None; + } else { + let key_vec = &indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap._get_indexes(self.storage, current_page) { + Ok(new_indexes) => { + let current_idx = (self.end % PAGE_SIZE) as usize; + if current_idx + 1 > new_indexes.len() { + res = None; + } else { + let key_vec = &new_indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + self.saved_back_index_page = Some(current_page); + self.saved_back_indexes = Some(new_indexes); + } + Err(_) => match self.keymap.get_key_from_pos(self.storage, self.end) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(_) => { + res = None; + } + }, + } + } + } else { + let next_page = _page_from_position(self.end - 1); + let current_page = _page_from_position(self.end); + match self.keymap._get_indexes(self.storage, next_page) { + Ok(next_index) => { + if current_page == next_page { + let current_idx = (self.end % PAGE_SIZE) as usize; + if current_idx + 1 > 
next_index.len() { + res = None; + } else { + let key_vec = &next_index[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap.get_key_from_pos(self.storage, self.end) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(_) => { + res = None; + } + } + } + self.saved_back_index_page = Some(next_page); + self.saved_back_indexes = Some(next_index); + } + Err(_) => match self.keymap.get_key_from_pos(self.storage, self.end) { + Ok(key) => { + res = Some(Ok(key)); + } + Err(_) => { + res = None; + } + }, + } + } + res + } + + // I implement `nth_back` manually because it is used in the standard library whenever + // it wants to skip over elements, but the default implementation repeatedly calls next_back. + // because that is very expensive in this case, and the items are just discarded, we wan + // do better here. + // In practice, this enables cheap paging over the storage by calling: + // `append_store.iter().skip(start).take(length).collect()` + fn nth_back(&mut self, n: usize) -> Option { + self.end = self.end.saturating_sub(n as u32); + self.next_back() + } +} + +// This enables writing `append_store.iter().skip(n).rev()` +impl<'a, K, T, S, Ser> ExactSizeIterator for KeyIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ +} + +// =============================================================================================== + +/// An iterator over the (key, item) pairs of the Keymap. Less efficient than just iterating over keys. 
+pub struct KeyItemIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + keymap: &'a Keymap<'a, K, T, Ser>, + storage: &'a S, + start: u32, + end: u32, + saved_indexes: Option>>, + saved_index_page: Option, + saved_back_indexes: Option>>, + saved_back_index_page: Option, +} + +impl<'a, K, T, S, Ser> KeyItemIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + /// constructor + pub fn new(keymap: &'a Keymap<'a, K, T, Ser>, storage: &'a S, start: u32, end: u32) -> Self { + Self { + keymap, + storage, + start, + end, + saved_indexes: None, + saved_index_page: None, + saved_back_indexes: None, + saved_back_index_page: None, + } + } +} + +impl<'a, K, T, S, Ser> Iterator for KeyItemIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + type Item = StdResult<(K, T)>; + + fn next(&mut self) -> Option { + if self.start >= self.end { + return None; + } + let res: Option; + if let (Some(page), Some(indexes)) = (&self.saved_index_page, &self.saved_indexes) { + let current_page = _page_from_position(self.start); + if *page == current_page { + let current_idx = (self.start % PAGE_SIZE) as usize; + if current_idx + 1 > indexes.len() { + res = None; + } else { + let key_vec = &indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + let item = self.keymap.get(self.storage, &key)?; + res = Some(Ok((key, item))); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap._get_indexes(self.storage, current_page) { + Ok(new_indexes) => { + let current_idx = (self.start % PAGE_SIZE) as usize; + if current_idx + 1 > new_indexes.len() { + res = None; + } else { + let key_vec = &new_indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + let item = 
self.keymap.get(self.storage, &key)?; + res = Some(Ok((key, item))); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + self.saved_index_page = Some(current_page); + self.saved_indexes = Some(new_indexes); + } + Err(_) => match self.keymap.get_pair_from_pos(self.storage, self.start) { + Ok(pair) => { + res = Some(Ok(pair)); + } + Err(_) => { + res = None; + } + }, + } + } + } else { + let next_page = _page_from_position(self.start + 1); + let current_page = _page_from_position(self.start); + match self.keymap._get_indexes(self.storage, next_page) { + Ok(next_index) => { + if current_page == next_page { + let current_idx = (self.start % PAGE_SIZE) as usize; + if current_idx + 1 > next_index.len() { + res = None; + } else { + let key_vec = &next_index[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + let item = self.keymap.get(self.storage, &key)?; + res = Some(Ok((key, item))); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap.get_pair_from_pos(self.storage, self.start) { + Ok(pair) => { + res = Some(Ok(pair)); + } + Err(_) => { + res = None; + } + } + } + self.saved_index_page = Some(next_page); + self.saved_indexes = Some(next_index); + } + Err(_) => match self.keymap.get_pair_from_pos(self.storage, self.start) { + Ok(pair) => { + res = Some(Ok(pair)); + } + Err(_) => { + res = None; + } + }, + } + } + self.start += 1; + res + } + + // This needs to be implemented correctly for `ExactSizeIterator` to work. + fn size_hint(&self) -> (usize, Option) { + let len = (self.end - self.start) as usize; + (len, Some(len)) + } + + // I implement `nth` manually because it is used in the standard library whenever + // it wants to skip over elements, but the default implementation repeatedly calls next. + // because that is very expensive in this case, and the items are just discarded, we wan + // do better here. 
+ // In practice, this enables cheap paging over the storage by calling: + // `append_store.iter().skip(start).take(length).collect()` + fn nth(&mut self, n: usize) -> Option { + self.start = self.start.saturating_add(n as u32); + self.next() + } +} + +impl<'a, K, T, S, Ser> DoubleEndedIterator for KeyItemIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ + fn next_back(&mut self) -> Option { + if self.start >= self.end { + return None; + } + self.end -= 1; + let res; + if let (Some(page), Some(indexes)) = (&self.saved_back_index_page, &self.saved_back_indexes) + { + let current_page = _page_from_position(self.end); + if *page == current_page { + let current_idx = (self.end % PAGE_SIZE) as usize; + if current_idx + 1 > indexes.len() { + res = None; + } else { + let key_vec = &indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + let item = self.keymap.get(self.storage, &key)?; + res = Some(Ok((key, item))); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap._get_indexes(self.storage, current_page) { + Ok(new_indexes) => { + let current_idx = (self.end % PAGE_SIZE) as usize; + if current_idx + 1 > new_indexes.len() { + res = None; + } else { + let key_vec = &new_indexes[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + let item = self.keymap.get(self.storage, &key)?; + res = Some(Ok((key, item))); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + self.saved_back_index_page = Some(current_page); + self.saved_back_indexes = Some(new_indexes); + } + Err(_) => match self.keymap.get_pair_from_pos(self.storage, self.end) { + Ok(pair) => { + res = Some(Ok(pair)); + } + Err(_) => { + res = None; + } + }, + } + } + } else { + let next_page = _page_from_position(self.end - 1); + let current_page = _page_from_position(self.end); + match self.keymap._get_indexes(self.storage, next_page) { + Ok(next_index) 
=> { + if current_page == next_page { + let current_idx = (self.end % PAGE_SIZE) as usize; + if current_idx + 1 > next_index.len() { + res = None; + } else { + let key_vec = &next_index[current_idx]; + match self.keymap.deserialize_key(key_vec) { + Ok(key) => { + let item = self.keymap.get(self.storage, &key)?; + res = Some(Ok((key, item))); + } + Err(e) => { + res = Some(Err(e)); + } + } + } + } else { + match self.keymap.get_pair_from_pos(self.storage, self.end) { + Ok(pair) => { + res = Some(Ok(pair)); + } + Err(_) => { + res = None; + } + } + } + self.saved_back_index_page = Some(next_page); + self.saved_back_indexes = Some(next_index); + } + Err(_) => match self.keymap.get_pair_from_pos(self.storage, self.end) { + Ok(pair) => { + res = Some(Ok(pair)); + } + Err(_) => { + res = None; + } + }, + } + } + res + } + + // I implement `nth_back` manually because it is used in the standard library whenever + // it wants to skip over elements, but the default implementation repeatedly calls next_back. + // because that is very expensive in this case, and the items are just discarded, we wan + // do better here. + // In practice, this enables cheap paging over the storage by calling: + // `append_store.iter().skip(start).take(length).collect()` + fn nth_back(&mut self, n: usize) -> Option { + self.end = self.end.saturating_sub(n as u32); + self.next_back() + } +} + +// This enables writing `append_store.iter().skip(n).rev()` +impl<'a, K, T, S, Ser> ExactSizeIterator for KeyItemIter<'a, K, T, S, Ser> +where + K: Serialize + DeserializeOwned, + T: Serialize + DeserializeOwned, + S: Storage, + Ser: Serde, +{ +} + +trait PrefixedTypedStorage { + fn as_slice(&self) -> &[u8]; + + /// Returns bool from retrieving the item with the specified key. 
+ /// + /// # Arguments + /// + /// * `storage` - a reference to the storage this item is in + /// * `key` - a byte slice representing the key to access the stored item + fn contains_impl(&self, storage: &S, key: &[u8]) -> bool { + let prefixed_key = [self.as_slice(), key].concat(); + storage.get(&prefixed_key).is_some() + } + + /// Returns StdResult from retrieving the item with the specified key. Returns a + /// StdError::NotFound if there is no item with that key + /// + /// # Arguments + /// + /// * `storage` - a reference to the storage this item is in + /// * `key` - a byte slice representing the key to access the stored item + fn load_impl(&self, storage: &S, key: &[u8]) -> StdResult { + let prefixed_key = [self.as_slice(), key].concat(); + Ser::deserialize( + &storage + .get(&prefixed_key) + .ok_or(StdError::not_found(type_name::()))?, + ) + } + + /// Returns StdResult> from retrieving the item with the specified key. Returns a + /// None if there is no item with that key + /// + /// # Arguments + /// + /// * `storage` - a reference to the storage this item is in + /// * `key` - a byte slice representing the key to access the stored item + fn may_load_impl(&self, storage: &S, key: &[u8]) -> StdResult> { + let prefixed_key = [self.as_slice(), key].concat(); + match storage.get(&prefixed_key) { + Some(value) => Ser::deserialize(&value).map(Some), + None => Ok(None), + } + } + + /// Returns StdResult<()> resulting from saving an item to storage + /// + /// # Arguments + /// + /// * `storage` - a mutable reference to the storage this item should go to + /// * `key` - a byte slice representing the key to access the stored item + /// * `value` - a reference to the item to store + fn save_impl(&self, storage: &mut S, key: &[u8], value: &T) -> StdResult<()> { + let prefixed_key = [self.as_slice(), key].concat(); + storage.set(&prefixed_key, &Ser::serialize(value)?); + Ok(()) + } + + /// Removes an item from storage + /// + /// # Arguments + /// + /// * `storage` - 
a mutable reference to the storage this item is in + /// * `key` - a byte slice representing the key to access the stored item + fn remove_impl(&self, storage: &mut S, key: &[u8]) { + let prefixed_key = [self.as_slice(), key].concat(); + storage.remove(&prefixed_key); + } +} + +#[cfg(test)] +mod tests { + use serde::{Deserialize, Serialize}; + + use cosmwasm_std::testing::MockStorage; + + use super::*; + + #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] + struct Foo { + string: String, + number: i32, + } + #[test] + fn test_keymap_perf_insert() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let total_items = 1000; + + let keymap: Keymap, i32> = Keymap::new(b"test"); + + for i in 0..total_items { + let key: Vec = (i as i32).to_be_bytes().to_vec(); + keymap.insert(&mut storage, &key, &i)?; + } + + assert_eq!(keymap.get_len(&storage)?, 1000); + + Ok(()) + } + + #[test] + fn test_keymap_perf_insert_remove() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let total_items = 100; + + let keymap: Keymap = Keymap::new(b"test"); + + for i in 0..total_items { + keymap.insert(&mut storage, &i, &i)?; + } + + for i in 0..total_items { + keymap.remove(&mut storage, &i)?; + } + + assert_eq!(keymap.get_len(&storage)?, 0); + + Ok(()) + } + + #[test] + fn test_keymap_paging() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let page_size: u32 = 5; + let total_items: u32 = 50; + let keymap: Keymap, u32> = Keymap::new(b"test"); + + for i in 0..total_items { + let key: Vec = (i as i32).to_be_bytes().to_vec(); + keymap.insert(&mut storage, &key, &i)?; + } + + for i in 0..((total_items / page_size) - 1) { + let start_page = i; + + let values = keymap.paging(&storage, start_page, page_size)?; + + for (index, (key_value, value)) in values.iter().enumerate() { + let i = page_size * start_page + index as u32; + let key: Vec = (i as i32).to_be_bytes().to_vec(); + assert_eq!(key_value, &key); + assert_eq!(value, &i); + } + } + + Ok(()) 
+ } + + #[test] + fn test_keymap_paging_overflow() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let page_size = 50; + let total_items = 10; + let keymap: Keymap = Keymap::new(b"test"); + + for i in 0..total_items { + keymap.insert(&mut storage, &(i as i32), &i)?; + } + + let values = keymap.paging_keys(&storage, 0, page_size)?; + + assert_eq!(values.len(), total_items as usize); + + for (index, value) in values.iter().enumerate() { + assert_eq!(value, &(index as i32)) + } + + Ok(()) + } + + #[test] + fn test_keymap_insert_multiple() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap, Foo> = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string two".to_string(), + number: 1111, + }; + + keymap.insert(&mut storage, &b"key1".to_vec(), &foo1)?; + keymap.insert(&mut storage, &b"key2".to_vec(), &foo2)?; + + let read_foo1 = keymap.get(&storage, &b"key1".to_vec()).unwrap(); + let read_foo2 = keymap.get(&storage, &b"key2".to_vec()).unwrap(); + + assert_eq!(foo1, read_foo1); + assert_eq!(foo2, read_foo2); + Ok(()) + } + + #[test] + fn test_keymap_contains() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap, Foo> = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + + keymap.insert(&mut storage, &b"key1".to_vec(), &foo1)?; + let contains_k1 = keymap.contains(&storage, &b"key1".to_vec()); + + assert_eq!(contains_k1, true); + + Ok(()) + } + + #[test] + fn test_keymap_iter() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap, Foo> = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string two".to_string(), + number: 1111, + }; + + keymap.insert(&mut storage, &b"key1".to_vec(), &foo1)?; + keymap.insert(&mut storage, &b"key2".to_vec(), &foo2)?; + + let mut x = 
keymap.iter(&storage)?; + let (len, _) = x.size_hint(); + assert_eq!(len, 2); + + assert_eq!(x.next().unwrap()?, (b"key1".to_vec(), foo1)); + + assert_eq!(x.next().unwrap()?, (b"key2".to_vec(), foo2)); + + Ok(()) + } + + #[test] + fn test_keymap_iter_keys() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string two".to_string(), + number: 1111, + }; + + let key1 = "key1".to_string(); + let key2 = "key2".to_string(); + + keymap.insert(&mut storage, &key1, &foo1)?; + keymap.insert(&mut storage, &key2, &foo2)?; + + let mut x = keymap.iter_keys(&storage)?; + let (len, _) = x.size_hint(); + assert_eq!(len, 2); + + assert_eq!(x.next().unwrap()?, key1); + + assert_eq!(x.next().unwrap()?, key2); + + Ok(()) + } + + #[test] + fn test_keymap_overwrite() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap, Foo> = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string two".to_string(), + number: 2222, + }; + + keymap.insert(&mut storage, &b"key1".to_vec(), &foo1)?; + keymap.insert(&mut storage, &b"key1".to_vec(), &foo2)?; + + let foo3 = keymap.get(&storage, &b"key1".to_vec()).unwrap(); + + assert_eq!(foo3, foo2); + + Ok(()) + } + + #[test] + fn test_keymap_suffixed_basics() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let original_keymap: Keymap = Keymap::new(b"test"); + let keymap = original_keymap.add_suffix(b"test_suffix"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string one".to_string(), + number: 1111, + }; + keymap.insert(&mut storage, &"key1".to_string(), &foo1)?; + keymap.insert(&mut storage, &"key2".to_string(), &foo2)?; + + let read_foo1 = keymap.get(&storage, &"key1".to_string()).unwrap(); + let read_foo2 = 
keymap.get(&storage, &"key2".to_string()).unwrap(); + + assert_eq!(original_keymap.get_len(&storage)?, 0); + assert_eq!(foo1, read_foo1); + assert_eq!(foo2, read_foo2); + + let alternative_keymap: Keymap = Keymap::new(b"alternative"); + let alt_same_suffix = alternative_keymap.add_suffix(b"test_suffix"); + + assert!(alt_same_suffix.is_empty(&storage)?); + + // show that it loads foo1 before removal + let before_remove_foo1 = keymap.get(&storage, &"key1".to_string()); + assert!(before_remove_foo1.is_some()); + assert_eq!(foo1, before_remove_foo1.unwrap()); + // and returns None after removal + keymap.remove(&mut storage, &"key1".to_string())?; + let removed_foo1 = keymap.get(&storage, &"key1".to_string()); + assert!(removed_foo1.is_none()); + + // show what happens when reading from keys that have not been set yet. + assert!(keymap.get(&storage, &"key3".to_string()).is_none()); + + Ok(()) + } + + #[test] + fn test_keymap_length() -> StdResult<()> { + let mut storage = MockStorage::new(); + + let keymap: Keymap = Keymap::new(b"test"); + let foo1 = Foo { + string: "string one".to_string(), + number: 1111, + }; + let foo2 = Foo { + string: "string one".to_string(), + number: 1111, + }; + + assert!(keymap.length.lock().unwrap().eq(&None)); + assert_eq!(keymap.get_len(&storage)?, 0); + assert!(keymap.length.lock().unwrap().eq(&Some(0))); + + let key1 = "k1".to_string(); + let key2 = "k2".to_string(); + + keymap.insert(&mut storage, &key1, &foo1)?; + assert_eq!(keymap.get_len(&storage)?, 1); + assert!(keymap.length.lock().unwrap().eq(&Some(1))); + + // add another item + keymap.insert(&mut storage, &key2, &foo2)?; + assert_eq!(keymap.get_len(&storage)?, 2); + assert!(keymap.length.lock().unwrap().eq(&Some(2))); + + // remove item and check length + keymap.remove(&mut storage, &key1)?; + assert_eq!(keymap.get_len(&storage)?, 1); + assert!(keymap.length.lock().unwrap().eq(&Some(1))); + + // override item (should not change length) + keymap.insert(&mut storage, &key2, 
&foo1)?; + assert_eq!(keymap.get_len(&storage)?, 1); + assert!(keymap.length.lock().unwrap().eq(&Some(1))); + + // remove item and check length + keymap.remove(&mut storage, &key2)?; + assert_eq!(keymap.get_len(&storage)?, 0); + assert!(keymap.length.lock().unwrap().eq(&Some(0))); + + Ok(()) + } +} diff --git a/packages/storage/src/lib.rs b/packages/storage/src/lib.rs index 0bacff8..65eed61 100644 --- a/packages/storage/src/lib.rs +++ b/packages/storage/src/lib.rs @@ -1,7 +1,10 @@ +#[doc = include_str!("../Readme.md")] pub mod append_store; pub mod deque_store; -pub mod typed_store; +pub mod item; +pub mod keymap; -pub use append_store::{AppendStore, AppendStoreMut}; -pub use deque_store::{DequeStore, DequeStoreMut}; -pub use typed_store::{TypedStore, TypedStoreMut}; +pub use append_store::AppendStore; +pub use deque_store::DequeStore; +pub use item::Item; +pub use keymap::Keymap; diff --git a/packages/storage/src/typed_store.rs b/packages/storage/src/typed_store.rs deleted file mode 100644 index 95ec8df..0000000 --- a/packages/storage/src/typed_store.rs +++ /dev/null @@ -1,176 +0,0 @@ -use std::any::type_name; -use std::marker::PhantomData; - -use serde::{de::DeserializeOwned, Serialize}; - -use cosmwasm_std::{StdError, StdResult, Storage}; - -use secret_toolkit_serialization::{Bincode2, Serde}; - -pub struct TypedStoreMut<'a, T, Ser = Bincode2> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: &'a mut dyn Storage, - item_type: PhantomData<*const T>, - serialization_type: PhantomData<*const Ser>, -} - -impl<'a, T> TypedStoreMut<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - pub fn attach(storage: &'a mut dyn Storage) -> Self { - Self::attach_with_serialization(storage, Bincode2) - } -} - -impl<'a, T, Ser> TypedStoreMut<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - pub fn attach_with_serialization(storage: &'a mut dyn Storage, _serialization: Ser) -> Self { - Self { - storage, - serialization_type: 
PhantomData, - item_type: PhantomData, - } - } - - pub fn store(&mut self, key: &[u8], item: &T) -> StdResult<()> { - self.storage.set(key, &Ser::serialize(item)?); - Ok(()) - } - - pub fn remove(&mut self, key: &[u8]) { - self.storage.remove(key); - } - - fn as_readonly(&self) -> TypedStore { - TypedStore { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - } - } - - pub fn load(&self, key: &[u8]) -> StdResult { - self.as_readonly().load(key) - } - - pub fn may_load(&self, key: &[u8]) -> StdResult> { - self.as_readonly().may_load(key) - } -} - -pub struct TypedStore<'a, T, Ser = Bincode2> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: &'a dyn Storage, - item_type: PhantomData<*const T>, - serialization_type: PhantomData<*const Ser>, -} - -impl<'a, T> TypedStore<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - pub fn attach(storage: &'a dyn Storage) -> Self { - Self::attach_with_serialization(storage, Bincode2) - } -} - -impl<'a, T, Ser> TypedStore<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - pub fn attach_with_serialization(storage: &'a dyn Storage, _serialization: Ser) -> Self { - Self { - storage, - serialization_type: PhantomData, - item_type: PhantomData, - } - } - - pub fn load(&self, key: &[u8]) -> StdResult { - let maybe_serialized = self.storage.get(key); - let serialized = maybe_serialized.ok_or_else(|| StdError::not_found(type_name::()))?; - Ser::deserialize(&serialized) - } - - pub fn may_load(&self, key: &[u8]) -> StdResult> { - match self.storage.get(key) { - Some(serialized) => Ser::deserialize(&serialized).map(Some), - None => Ok(None), - } - } -} - -#[cfg(test)] -mod tests { - use serde::{Deserialize, Serialize}; - - use cosmwasm_std::testing::MockStorage; - - use secret_toolkit_serialization::Json; - - use super::*; - - #[derive(Serialize, Deserialize, Eq, PartialEq, Debug)] - struct Foo { - string: String, - number: i32, - } - - 
#[test] - fn test_typed_store() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut typed_store_mut = TypedStoreMut::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string one".to_string(), - number: 1111, - }; - typed_store_mut.store(b"key1", &foo1)?; - typed_store_mut.store(b"key2", &foo2)?; - - let read_foo1 = typed_store_mut.load(b"key1")?; - let read_foo2 = typed_store_mut.load(b"key2")?; - - assert_eq!(foo1, read_foo1); - assert_eq!(foo2, read_foo2); - - // show that it loads foo1 before removal - let before_remove_foo1 = typed_store_mut.may_load(b"key1")?; - assert!(before_remove_foo1.is_some()); - assert_eq!(foo1, before_remove_foo1.unwrap()); - // and returns None after removal - typed_store_mut.remove(b"key1"); - let removed_foo1 = typed_store_mut.may_load(b"key1")?; - assert!(removed_foo1.is_none()); - - // show what happens when reading from keys that have not been set yet. - assert!(typed_store_mut.load(b"key3").is_err()); - assert!(typed_store_mut.may_load(b"key3")?.is_none()); - - // Try to load it with the wrong format - let typed_store = TypedStore::::attach_with_serialization(&storage, Json); - match typed_store.load(b"key2") { - Err(StdError::ParseErr { - target_type, msg, .. 
- }) if target_type == "i32" && msg == "Invalid type" => {} - other => panic!("unexpected value: {:?}", other), - } - - Ok(()) - } -} From 4f4e154df8c0a845ed4c2da8b477778ffd869e52 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Sun, 21 Aug 2022 23:06:36 +0300 Subject: [PATCH 02/12] removed some clippy errors --- packages/storage/src/append_store.rs | 6 +++--- packages/storage/src/deque_store.rs | 8 ++++---- packages/storage/src/item.rs | 5 +---- packages/storage/src/keymap.rs | 8 ++++---- 4 files changed, 12 insertions(+), 15 deletions(-) diff --git a/packages/storage/src/append_store.rs b/packages/storage/src/append_store.rs index 0e5f793..79bbff3 100644 --- a/packages/storage/src/append_store.rs +++ b/packages/storage/src/append_store.rs @@ -183,11 +183,11 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for AppendStore<'a, T, Ser> { fn clone(&self) -> Self { Self { - namespace: self.namespace.clone(), + namespace: self.namespace, prefix: self.prefix.clone(), length: Mutex::new(None), - item_type: self.item_type.clone(), - serialization_type: self.serialization_type.clone(), + item_type: self.item_type, + serialization_type: self.serialization_type, } } } diff --git a/packages/storage/src/deque_store.rs b/packages/storage/src/deque_store.rs index f9c7a08..34de104 100644 --- a/packages/storage/src/deque_store.rs +++ b/packages/storage/src/deque_store.rs @@ -34,7 +34,7 @@ where serialization_type: PhantomData, } -impl<'a, 'b, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { /// constructor pub const fn new(prefix: &'a [u8]) -> Self { Self { @@ -296,12 +296,12 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for DequeStore<'a, T, Ser> { fn clone(&self) -> Self { Self { - namespace: 
self.namespace.clone(), + namespace: self.namespace, prefix: self.prefix.clone(), length: Mutex::new(None), offset: Mutex::new(None), - item_type: self.item_type.clone(), - serialization_type: self.serialization_type.clone(), + item_type: self.item_type, + serialization_type: self.serialization_type, } } } diff --git a/packages/storage/src/item.rs b/packages/storage/src/item.rs index 8c909b1..94a93fc 100644 --- a/packages/storage/src/item.rs +++ b/packages/storage/src/item.rs @@ -55,10 +55,7 @@ where /// efficient way to see if any object is currently saved. pub fn is_empty(&self, storage: &S) -> bool { - match storage.get(self.as_slice()) { - Some(_) => false, - None => true, - } + storage.get(self.as_slice()).is_none() } /// Loads the data, perform the specified action, and store the result diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs index baeaa9e..adcafe7 100644 --- a/packages/storage/src/keymap.rs +++ b/packages/storage/src/keymap.rs @@ -430,12 +430,12 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: { fn clone(&self) -> Self { Self { - namespace: self.namespace.clone(), + namespace: self.namespace, prefix: self.prefix.clone(), length: Mutex::new(None), - key_type: self.key_type.clone(), - item_type: self.item_type.clone(), - serialization_type: self.serialization_type.clone(), + key_type: self.key_type, + item_type: self.item_type, + serialization_type: self.serialization_type, } } } From 0d87e11211bc050368f21ca28be28627aa51000d Mon Sep 17 00:00:00 2001 From: srdtrk Date: Mon, 22 Aug 2022 00:00:45 +0300 Subject: [PATCH 03/12] updated the clone methods --- packages/storage/src/append_store.rs | 4 ++-- packages/storage/src/deque_store.rs | 4 ++-- packages/storage/src/keymap.rs | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/storage/src/append_store.rs b/packages/storage/src/append_store.rs index 79bbff3..a4a7ef0 100644 --- a/packages/storage/src/append_store.rs 
+++ b/packages/storage/src/append_store.rs @@ -186,8 +186,8 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for AppendStore<'a, namespace: self.namespace, prefix: self.prefix.clone(), length: Mutex::new(None), - item_type: self.item_type, - serialization_type: self.serialization_type, + item_type: PhantomData, + serialization_type: PhantomData, } } } diff --git a/packages/storage/src/deque_store.rs b/packages/storage/src/deque_store.rs index 34de104..cb7ea48 100644 --- a/packages/storage/src/deque_store.rs +++ b/packages/storage/src/deque_store.rs @@ -300,8 +300,8 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for DequeStore<'a, T prefix: self.prefix.clone(), length: Mutex::new(None), offset: Mutex::new(None), - item_type: self.item_type, - serialization_type: self.serialization_type, + item_type: PhantomData, + serialization_type: PhantomData, } } } diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs index adcafe7..86d92c7 100644 --- a/packages/storage/src/keymap.rs +++ b/packages/storage/src/keymap.rs @@ -433,9 +433,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: namespace: self.namespace, prefix: self.prefix.clone(), length: Mutex::new(None), - key_type: self.key_type, - item_type: self.item_type, - serialization_type: self.serialization_type, + key_type: PhantomData, + item_type: PhantomData, + serialization_type: PhantomData, } } } From e9afc5968a86a39e2877746f61eca523311dcefa Mon Sep 17 00:00:00 2001 From: srdtrk Date: Mon, 22 Aug 2022 03:21:30 +0300 Subject: [PATCH 04/12] fixed a new serde bug in keymap, should be more efficient now too --- packages/storage/src/keymap.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs index 86d92c7..5a2a3bc 100644 --- a/packages/storage/src/keymap.rs +++ b/packages/storage/src/keymap.rs @@ -20,7 +20,7 @@ fn _page_from_position(position: u32) -> 
u32 { } #[derive(Serialize, Deserialize)] -struct InternalItem +struct InternalItem where T: Serialize + DeserializeOwned, Ser: Serde, @@ -171,7 +171,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } } /// internal item get function - fn _get_from_key(&self, storage: &S, key: &K) -> StdResult> { + fn _get_from_key(&self, storage: &S, key: &K) -> StdResult> { let key_vec = self.serialize_key(key)?; self.load_impl(storage, &key_vec) } @@ -414,7 +414,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Serde> - PrefixedTypedStorage, Ser> for Keymap<'a, K, T, Ser> + PrefixedTypedStorage, Bincode2> for Keymap<'a, K, T, Ser> { fn as_slice(&self) -> &[u8] { if let Some(prefix) = &self.prefix { From 90deb2d26ba63481bd5a18303449cedd7d963e43 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Mon, 22 Aug 2022 03:43:29 +0300 Subject: [PATCH 05/12] put the correct version of storage in cargo.toml, releases updated, versioning is more in sync with the master branch now. Cashmap removed from the incubator much like the master branch --- Releases.md | 44 +- packages/incubator/Cargo.toml | 4 +- packages/incubator/src/cashmap.rs | 1195 ----------------------------- packages/incubator/src/lib.rs | 5 - packages/storage/Cargo.toml | 2 +- packages/storage/src/keymap.rs | 6 +- packages/toolkit/Cargo.toml | 6 +- 7 files changed, 45 insertions(+), 1217 deletions(-) delete mode 100644 packages/incubator/src/cashmap.rs diff --git a/Releases.md b/Releases.md index 35b39a7..a03ca2a 100644 --- a/Releases.md +++ b/Releases.md @@ -1,22 +1,48 @@ # Release notes for the Secret Toolkit -## Next Release +## secret-toolkit-storage v0.4.1 + +* BUGFIX: `Item::is_empty` was returning the opposite value from what you'd expect. + +## v0.4.0 + +This release mostly includes the work of @srdtrk in #53. Thanks Srdtrk! 
+ +It revamps the `secret-toolkit-storage` package to make it more similar to `cw-storage-plus` and much easier +to use. It also removes the `Cashmap` type from the incubator in favor of `KeyMap` in `secret-toolkit-storage`. + +This is a summary of the changes and additions in this release: + +* Minimum Rust version is bumped to the latest v1.63. This is because we want to use `Mutex::new` in a `const fn`. +* No more distinction between `Readonly*` and `*Mut` types. Instead, methods take references or mutable references to the storage every time. +* Usage of `PrefixedStore` is made mostly unnecessary. +* Storage type's constructors are const functions, which means they can be initialized as global static variables. +* Added `secret-toolkit::storage::Item` which is similar to `Item` from `cw-storage-plus` or `TypedStore` from `cosmwasm_storage` v0.10. +* Added `secret-toolkit::storage::KeyMap` which is similar to `Cashmap`. +* `Cashmap` is completely removed. + +A full guide to using the new `storage` types can be found +[in the package's readme file](https://github.com/srdtrk/secret-toolkit/blob/3725530aebe149d14f7f3f1662844340eb27e015/packages/storage/Readme.md). ## secret-toolkit-incubator v0.3.1 + * Fixed compilation issue with Rust v1.61 (#46, #48) * Removed Siphasher dependency (#46, #48) ## secret-toolkit-utils v0.3.1 ### Security + * BUGFIX: `secret-toolkit::utils::FeatureToggle::handle_pause` had an inverse authorization check: only non-pausers could pause features. ## secret-toolkit-permit v0.3.1 + * Removed the `ecc-secp256k1` feature from `secret-toolkit-crypto` dependency of `secret-toolkit-permit`. - * This tiny change significantly reduces the size of binaries that only use the permit feature. + * This tiny change significantly reduces the size of binaries that only use the permit feature. ## v0.3.0 + * Added `clear` method to `AppendStore` and `DequeStore` to quickly reset the collections (#34) * docs.rs documentation now includes all sub-crates. 
* BUGFIX: `secret-toolkit::snip721::Metadata` was severely out of date with the SNIP-721 specification, and not useful. @@ -29,8 +55,9 @@ * Added `secret-toolkit::utils::feature_toggle` which allow managing feature flags in your contract. ### Breaking -* `secret-toolkit::permit::validate()` Now supports validating any type of Cosmos address. -Interface changes: Now takes a reference to the current token address instead + +* `secret-toolkit::permit::validate()` Now supports validating any type of Cosmos address. +Interface changes: Now takes a reference to the current token address instead of taking it by value and an optional hrp string. In addition, it returns a String and not HumanAddr. * Renamed `secret-toolkit::permit::Permission` to `secret-toolkit::permit::TokenPermission`. @@ -44,6 +71,7 @@ In addition, it returns a String and not HumanAddr. * `secret-toolkit-incubator` now has features `["cashmap", "generational-store"]` which are all off by default. ## v0.2.0 + This release includes a ton of new features, and a few breaking changes in various interfaces. This version is also the first released to [crates.io](https://crates.io)! @@ -53,7 +81,7 @@ This version is also the first released to [crates.io](https://crates.io)! * Added support for SNIP-22 messages (batch operations) * Added support for SNIP-23 messages (improved Send operations) which broke some interfaces * Added support for SNIP-24 permits -* Added `Base64Of`, `Base64JsonOf`, and `Base64Bincode2Of`, +* Added `Base64Of`, `Base64JsonOf`, and `Base64Bincode2Of`, which are wrappers that automatically deserializes base64 strings to `T`. It can be used in message types' fields instead of `Binary` when the contents of the string should have more specific contents. @@ -65,9 +93,11 @@ This version is also the first released to [crates.io](https://crates.io)! while `["crypto", "permit", "incubator"]` are left disabled by default. 
## v0.1.1 + * Removed unused dev-dependency that was slowing down test compilation times. ## v0.1.0 + This is the first release of `secret-toolkit`. It supports: * `secret-toolkit::snip20` - Helper types and functions for interaction with @@ -82,6 +112,6 @@ This is the first release of `secret-toolkit`. It supports: * `secret-toolkit::serialization` - marker types for overriding the storage format used by types in `secret-toolkit::storage`. `Json` and `Bincode2`. * `secret-toolkit::utils` - General utilities for writing contract code. - * `padding` - tools for padding queries and responses. - * `calls` - Tools for marking types as messages in queries and callbacks + * `padding` - tools for padding queries and responses. + * `calls` - Tools for marking types as messages in queries and callbacks to other contracts. diff --git a/packages/incubator/Cargo.toml b/packages/incubator/Cargo.toml index ab1a44f..2338c42 100644 --- a/packages/incubator/Cargo.toml +++ b/packages/incubator/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "secret-toolkit-incubator" -version = "0.3.1" +version = "0.4.0" edition = "2018" authors = ["SCRT Labs "] license-file = "../../LICENSE" @@ -20,7 +20,5 @@ cosmwasm-storage = { git = "https://github.com/scrtlabs/cosmwasm", branch = "sec secret-toolkit-serialization = { version = "0.3", path = "../serialization", optional = true } [features] -default = ["cashmap", "generational-store", "maxheap"] -cashmap = ["cosmwasm-storage", "serde", "secret-toolkit-serialization", "cosmwasm-std"] generational-store = ["secret-toolkit-serialization", "serde", "cosmwasm-std"] maxheap = ["secret-toolkit-serialization", "serde", "cosmwasm-std"] diff --git a/packages/incubator/src/cashmap.rs b/packages/incubator/src/cashmap.rs deleted file mode 100644 index 4ea1cc2..0000000 --- a/packages/incubator/src/cashmap.rs +++ /dev/null @@ -1,1195 +0,0 @@ -#![allow(dead_code)] -use std::any::type_name; -use std::collections::hash_map::DefaultHasher; -use std::hash::{Hash, 
Hasher}; -use std::marker::PhantomData; - -use serde::{de::DeserializeOwned, Deserialize, Serialize}; - -use cosmwasm_std::{StdError, StdResult, Storage}; - -use cosmwasm_storage::{PrefixedStorage, ReadonlyPrefixedStorage}; -use secret_toolkit_serialization::{Bincode2, Serde}; -use std::cmp::min; - -const INDEXES: &[u8] = b"indexes"; -const MAP_LENGTH: &[u8] = b"length"; - -const PAGE_SIZE: u32 = 5; - -#[derive(PartialEq)] -enum KeyInMap { - No, - Yes, - Collision, -} - -fn _page_from_position(position: u32) -> u32 { - position / PAGE_SIZE -} - -#[derive(Serialize, Deserialize, Clone)] -struct MetaData { - position: u32, - // displacement is set if we encountered a collision and we needed to move this item - displacement: u64, - key: Vec, - deleted: bool, -} - -#[derive(Serialize, Deserialize)] -pub struct InternalItem -// where -// T: Serialize + DeserializeOwned, -{ - item: T, - meta_data: MetaData, -} - -pub struct CashMap<'a, T, Ser = Bincode2> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: &'a mut dyn Storage, - item_type: PhantomData<*const InternalItem>, - serialization_type: PhantomData<*const Ser>, - prefix: Option>, -} - -impl<'a, T> CashMap<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - pub fn init(name: &[u8], storage: &'a mut dyn Storage) -> Self { - Self::attach_with_serialization(storage, Bincode2, Some(name.to_vec())) - } - - pub fn attach(storage: &'a mut dyn Storage) -> Self { - Self::attach_with_serialization(storage, Bincode2, None) - } -} - -impl<'a, T, Ser> CashMap<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - pub fn is_empty(&self) -> bool { - self.as_readonly().is_empty() - } - pub fn len(&self) -> u32 { - self.as_readonly().len() - } - - /// This method allows customization of the serialization, in case we want to force - /// something other than Bincode2, which has it's drawbacks (such as Enums fucking up) - pub fn attach_with_serialization( - storage: &'a mut dyn Storage, 
- _serialization: Ser, - prefix: Option>, - ) -> Self { - Self { - storage, - serialization_type: PhantomData, - item_type: PhantomData, - prefix, - } - } - - pub fn remove(&mut self, key: &[u8]) -> StdResult<()> { - let mut len = self.as_readonly().len(); - - let item = self.as_readonly()._direct_get(key); - - if item.is_none() || len == 0 { - return Err(StdError::not_found("Item not found in map")); - } - - let mut unwrapped_item = item.unwrap(); - unwrapped_item.meta_data.deleted = true; - - let removed_pos = unwrapped_item.meta_data.position; - //debug_print(format!("removing item from position {}", &removed_pos)); - - let page = _page_from_position(removed_pos); - - let mut indexes = self.as_readonly().get_indexes(page); - let hash = self - .as_readonly() - .key_to_hash(key) - .overflowing_add(unwrapped_item.meta_data.displacement) - .0; - - len -= 1; - self.set_length(len)?; - - return if !indexes.contains(&hash) { - Err(StdError::generic_err( - "Tried to remove, but hash not found - should never happen", - )) - } else { - if len == 0 || len == removed_pos { - indexes.pop(); - self.store_indexes(page, &indexes)?; - return self.store(&hash.to_be_bytes(), &unwrapped_item); - //return self.remove_from_store(&hash.to_be_bytes()); - } - - // find the index of our item - // todo: replace this since we know the absolute position from the internalitem - let pos_in_indexes = indexes.iter().position(|index| index == &hash).unwrap(); - - // replace the last item with our new item - let max_page = _page_from_position(len); - let mut last_item_indexes = self.as_readonly().get_indexes(max_page); - - if let Some(last_item_hash) = last_item_indexes.pop() { - if max_page != page { - self.store_indexes(max_page, &last_item_indexes)?; - } else { - // if we're already on the max page indexes has not removed the last item, - // so we do it here - indexes.pop(); - } - - if let Some(mut last_item) = self.as_readonly().get_no_hash(&last_item_hash) { - last_item.meta_data.position = 
removed_pos; - - // debug_print(format!( - // "replacing {} with {}", - // &indexes[pos_in_indexes], &last_item_hash - // )); - let _ = std::mem::replace(&mut indexes[pos_in_indexes], last_item_hash); - - // store the modified last item (with new position) - self.store(&last_item_hash.to_be_bytes(), &last_item)?; - - // debug_print(format!( - // "replacing {} with {}", - // &indexes[pos_in_indexes], &last_item_hash - // )); - self.store_indexes(page, &indexes)?; - //self.remove_from_store(&hash.to_be_bytes()) - - // store the item with the deleted = true flag - self.store(&hash.to_be_bytes(), &unwrapped_item) - } else { - return Err(StdError::not_found("Failed to remove item from map")); - } - } else { - Err(StdError::not_found("Failed to remove item from map")) - } - }; - } - - pub fn insert(&mut self, key: &[u8], item: T) -> StdResult<()> { - let hash = self.as_readonly().key_to_hash(key); - //debug_print(format!("***insert - inserting {:?}: {}", key, &hash)); - let pos = self.len(); - match self.as_readonly()._is_slot_taken(key)? 
{ - // key is in map, but can also be in some other location other than the direct hash - (KeyInMap::Yes, prev_hash, Some(prev_item)) => { - let position = &prev_item.meta_data.position; - let to_store = InternalItem { - item, - meta_data: MetaData { - position: *position, - displacement: prev_item.meta_data.displacement, - key: key.to_vec(), - deleted: false, - }, - }; - - self.store(&prev_hash.to_be_bytes(), &to_store)?; - } - (KeyInMap::No, _, None) => { - // Key not in map, hash position not taken - let page = _page_from_position(pos); - let mut indexes = self.as_readonly().get_indexes(page); - //debug_print(format!("*** Got indexes: {:?}", &indexes)); - if !indexes.contains(&hash) { - //debug_print(format!("*** Pushing: {}", &hash)); - indexes.push(hash); - self.store_indexes(page, &indexes)?; - //debug_print(format!("*** stored indexes: {:?}", &indexes)); - } - - let to_store = InternalItem { - item, - meta_data: MetaData { - position: pos, - displacement: 0, - key: key.to_vec(), - deleted: false, - }, - }; - self.store(&hash.to_be_bytes(), &to_store)?; - self.set_length(pos + 1)?; - } - (KeyInMap::Collision, _, None) => { - // Key not in map, hash position is taken - if pos == u32::MAX { - return Err(StdError::generic_err( - "Map is full. How the hell did you get here?", - )); - } - let (displaced_hash, displacement) = - self.as_readonly()._get_next_empty_slot(hash)?; - - let page = _page_from_position(pos); - let mut indexes = self.as_readonly().get_indexes(page); - - indexes.push(displaced_hash); - self.store_indexes(page, &indexes)?; - - let to_store = InternalItem { - item, - meta_data: MetaData { - position: pos, - displacement, - key: key.to_vec(), - deleted: false, - }, - }; - self.store(&displaced_hash.to_be_bytes(), &to_store)?; - self.set_length(pos + 1)?; - } - _ => { - return Err(StdError::generic_err( - "Error checking if slot is taken. 
This can never happen", - )); - } - } - - Ok(()) - } - - /// user facing method to get T - pub fn get(&self, key: &[u8]) -> Option { - self.as_readonly().get(key) - } - - pub fn paging(&self, start_page: u32, size: u32) -> StdResult> { - self.as_readonly().paging(start_page, size) - } - - pub fn contains(&self, key: &[u8]) -> bool { - self.as_readonly().contains_key(key).is_some() - } - - fn get_position(&self, key: &[u8]) -> Option { - return if let Some(res) = self.as_readonly()._direct_get(key) { - Some(res.meta_data.position) - } else { - None - }; - } - - #[allow(clippy::ptr_arg)] - fn store_indexes(&mut self, index: u32, indexes: &Vec) -> StdResult<()> { - if let Some(prefix) = &self.prefix { - let mut store = PrefixedStorage::new(self.storage, prefix); - store.set( - &[INDEXES, index.to_be_bytes().to_vec().as_slice()].concat(), - &Ser::serialize(indexes)?, - ); - } else { - self.storage.set( - &[INDEXES, index.to_be_bytes().to_vec().as_slice()].concat(), - &Ser::serialize(indexes)?, - ); - } - Ok(()) - } - - // unused - we just set deleted = true - fn remove_from_store(&mut self, key: &[u8]) -> StdResult<()> { - if let Some(prefix) = &self.prefix { - let mut store = PrefixedStorage::new(self.storage, prefix); - store.remove(key) - } else { - self.storage.remove(key) - }; - Ok(()) - } - - fn store(&mut self, key: &[u8], item: &InternalItem) -> StdResult<()> { - if let Some(prefix) = &self.prefix { - let mut store = PrefixedStorage::new(self.storage, prefix); - store.set(key, &Ser::serialize(item)?) - } else { - self.storage.set(key, &Ser::serialize(item)?) 
- } - - Ok(()) - } - - fn as_readonly(&self) -> ReadOnlyCashMap { - ReadOnlyCashMap { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - prefix: self.prefix.clone(), - } - } - - fn set_length(&mut self, length: u32) -> StdResult<()> { - if let Some(prefix) = &self.prefix { - let mut store = PrefixedStorage::new(self.storage, prefix); - store.set(MAP_LENGTH, &Ser::serialize(&length.to_be_bytes())?) - } else { - self.storage - .set(MAP_LENGTH, &Ser::serialize(&length.to_be_bytes())?) - } - - Ok(()) - } - - // fn get(&self, key: &[u8]) -> StdResult { - // self.as_readonly().get(key) - // } -} - -/// basically this is used in queries -pub struct ReadOnlyCashMap<'a, T, Ser = Bincode2> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: &'a dyn Storage, - item_type: PhantomData<*const InternalItem>, - serialization_type: PhantomData<*const Ser>, - prefix: Option>, -} - -impl<'a, T> ReadOnlyCashMap<'a, T, Bincode2> -where - T: Serialize + DeserializeOwned, -{ - pub fn init(name: &[u8], storage: &'a dyn Storage) -> Self { - Self::attach_with_serialization(storage, Bincode2, Some(name.to_vec())) - } - - pub fn attach(storage: &'a dyn Storage) -> Self { - Self::attach_with_serialization(storage, Bincode2, None) - } -} - -impl<'a, T, Ser> ReadOnlyCashMap<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - pub fn attach_with_serialization( - storage: &'a dyn Storage, - _serialization: Ser, - prefix: Option>, - ) -> Self { - Self { - storage, - serialization_type: PhantomData, - item_type: PhantomData, - prefix, - } - } - - fn _is_slot_taken(&self, key: &[u8]) -> StdResult<(KeyInMap, u64, Option>)> { - let (in_map, hash) = self._get_slot_and_status(key)?; - - if in_map == KeyInMap::Yes { - if let Ok(item) = self._load_internal(&hash) { - return Ok((in_map, hash, Some(item))); - } - } - - Ok((in_map, hash, None)) - - // (item) = self._get_slot_and_status(key) { - // return if 
item.meta_data.key == key.to_vec() { - // (KeyInMap::Yes, Some(item)) - // } else { - // (KeyInMap::Collision, Some(item)) - // }; - // } - // (KeyInMap::No, None) - } - - // returns the slot and the displacement - fn _get_next_empty_slot(&self, hash: u64) -> StdResult<(u64, u64)> { - for i in 0..u32::MAX { - let testing_value = hash.overflowing_add(i as u64).0; - let item = self.get_no_hash(&testing_value); - if item.is_none() || item.unwrap().meta_data.deleted { - return Ok((testing_value, i as u64)); - } - } - - Err(StdError::generic_err( - "Failed to get available slot. How did you get here?", - )) - } - - pub fn contains_key(&self, key: &[u8]) -> Option { - let hash = self.key_to_hash(key); - let vec_key = key.to_vec(); - for i in 0..u32::MAX { - let testing_value = hash.overflowing_add(i as u64).0; - let item = self.get_no_hash(&testing_value); - if let Some(val) = item { - if val.meta_data.key == vec_key && !val.meta_data.deleted { - return Some(testing_value); - } - } else { - // empty slot found - so we didn't find the correct item - return None; - } - } - - None - } - - /// user facing method to get T - pub fn get(&self, key: &[u8]) -> Option { - if self.is_empty() { - return None; - } - - if let Some(place) = self.contains_key(key) { - if let Ok(result) = self._direct_load(&place) { - Some(result) - } else { - None - } - } else { - None - } - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - pub fn len(&self) -> u32 { - let maybe_serialized = if let Some(prefix) = &self.prefix { - let store = ReadonlyPrefixedStorage::new(self.storage, prefix); - store.get(MAP_LENGTH) - } else { - self.storage.get(MAP_LENGTH) - }; - // let maybe_serialized = self.storage.get(&MAP_LENGTH); - let serialized = maybe_serialized.unwrap_or_default(); - u32::from_be(Ser::deserialize(&serialized).unwrap_or_default()) - } - - /// starts from page 0 - /// - /// Will return error if you access out of bounds - pub fn paging(&self, start_page: u32, size: u32) -> 
StdResult> { - let start_pos = start_page * size; - let mut end_pos = start_pos + size - 1; - - let max_size = self.len(); - - if max_size == 0 { - return Ok(vec![]); - } - - if start_pos > max_size { - return Err(StdError::NotFound { - kind: "Out of bounds".to_string(), - }); - } else if end_pos >= max_size { - end_pos = max_size - 1; - } - - // debug_print(format!( - // "***paging - reading from {} to {}", - // start_pos, end_pos - // )); - - self.get_positions(start_pos, end_pos) - } - - fn get_positions(&self, start: u32, end: u32) -> StdResult> { - let start_page = _page_from_position(start); - let end_page = _page_from_position(end); - - let mut results = vec![]; - - for page in start_page..=end_page { - let start_pos = if page == start_page { - start % PAGE_SIZE - } else { - 0 - }; - - let max_page_pos = min(end, ((page + 1) * PAGE_SIZE) - 1) % PAGE_SIZE; - - let indexes = self.get_indexes(page); - - if max_page_pos as usize > indexes.len() { - return Err(StdError::generic_err("Out of bounds")); - } - - let hashes: Vec = indexes[start_pos as usize..=max_page_pos as usize].to_vec(); - // debug_print(format!( - // "***paging - got hashes of length {}: {:?}", - // &hashes.len(), - // &hashes - // )); - - let res: Vec = hashes - .iter() - .map(|h| self._direct_load(h).unwrap()) - .collect(); - - results.extend(res); - } - - Ok(results) - } - - pub fn get_indexes(&self, index: u32) -> Vec { - let maybe_serialized = if let Some(prefix) = &self.prefix { - let store = ReadonlyPrefixedStorage::new(self.storage, prefix); - store.get(&[INDEXES, index.to_be_bytes().to_vec().as_slice()].concat()) - } else { - self.storage - .get(&[INDEXES, index.to_be_bytes().to_vec().as_slice()].concat()) - }; - let serialized = maybe_serialized.unwrap_or_default(); - Ser::deserialize(&serialized).unwrap_or_default() - } - - fn _direct_load(&self, hash: &u64) -> StdResult { - let int_item = self._load_internal(hash)?; - Ok(int_item.item) - } - - fn _get_slot_and_status(&self, key: 
&[u8]) -> StdResult<(KeyInMap, u64)> { - let hash = self.key_to_hash(key); - if let Some(place) = self.contains_key(key) { - Ok((KeyInMap::Yes, place)) - } else { - let (next_slot, _) = self._get_next_empty_slot(hash)?; - - if next_slot == hash { - return Ok((KeyInMap::No, next_slot)); - } - - Ok((KeyInMap::Collision, next_slot)) - } - } - - /// get InternalItem and not just T - fn _direct_get(&self, key: &[u8]) -> Option> { - if let Some(place) = self.contains_key(key) { - if let Ok(result) = self._load_internal(&place) { - Some(result) - } else { - None - } - } else { - None - } - } - - fn _load_internal(&self, hash: &u64) -> StdResult> { - let int_item = self._prefix_load(hash)?; - Ok(int_item) - } - - pub fn load(&self, key: &[u8]) -> StdResult { - let hash = self.key_to_hash(key); - - let int_item = self._prefix_load(&hash)?; - Ok(int_item.item) - } - - fn _prefix_load(&self, hash: &u64) -> StdResult> { - let serialized = if let Some(prefix) = &self.prefix { - let store = ReadonlyPrefixedStorage::new(self.storage, prefix); - store.get(&hash.to_be_bytes()) - } else { - self.storage.get(&hash.to_be_bytes()) - } - .ok_or_else(|| StdError::not_found(type_name::()))?; - - let int_item: InternalItem = Ser::deserialize(&serialized)?; - Ok(int_item) - } - - fn get_no_hash(&self, hash: &u64) -> Option> { - if let Ok(result) = self._load_internal(hash) { - Some(result) - } else { - None - } - } - - fn key_to_hash(&self, key: &[u8]) -> u64 { - let mut hasher = DefaultHasher::default(); - key.hash(&mut hasher); - hasher.finish() - } - - pub fn iter(&self) -> Iter<'a, T, Ser> { - Iter { - storage: Self::clone(self), - start: 0, - end: self.len(), - } - } -} - -/// An iterator over the contents of the append store. 
-pub struct Iter<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - storage: ReadOnlyCashMap<'a, T, Ser>, - start: u32, - end: u32, -} - -impl<'a, T, Ser> Iterator for Iter<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - type Item = T; - - fn next(&mut self) -> Option { - if self.start >= self.end { - return None; - } - let item = self.storage.get_positions(self.start, self.start); - self.start += 1; - if let Ok(mut inner) = item { - Some(inner.pop().unwrap()) - } else { - None - } - } - - // This needs to be implemented correctly for `ExactSizeIterator` to work. - fn size_hint(&self) -> (usize, Option) { - let len = (self.end - self.start) as usize; - (len, Some(len)) - } - - // I implement `nth` manually because it is used in the standard library whenever - // it wants to skip over elements, but the default implementation repeatedly calls next. - // because that is very expensive in this case, and the items are just discarded, we wan - // do better here. - // In practice, this enables cheap paging over the storage by calling: - // `append_store.iter().skip(start).take(length).collect()` - fn nth(&mut self, n: usize) -> Option { - self.start = self.start.saturating_add(n as u32); - self.next() - } -} - -impl<'a, T, Ser> IntoIterator for ReadOnlyCashMap<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - type Item = T; - type IntoIter = Iter<'a, T, Ser>; - - fn into_iter(self) -> Iter<'a, T, Ser> { - let end = self.len(); - Iter { - storage: self, - start: 0, - end, - } - } -} - -// Manual `Clone` implementation because the default one tries to clone the Storage?? 
-impl<'a, T, Ser> Clone for ReadOnlyCashMap<'a, T, Ser> -where - T: Serialize + DeserializeOwned, - Ser: Serde, -{ - fn clone(&self) -> Self { - Self { - storage: self.storage, - item_type: self.item_type, - serialization_type: self.serialization_type, - prefix: self.prefix.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use serde::{Deserialize, Serialize}; - - use cosmwasm_std::testing::MockStorage; - - use secret_toolkit_serialization::Json; - - use super::*; - - #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] - struct Foo { - string: String, - number: i32, - } - #[test] - fn test_hashmap_perf_insert() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let total_items = 1000; - - let mut cashmap = CashMap::attach(&mut storage); - - for i in 0..total_items { - cashmap.insert(&(i as i32).to_be_bytes(), i)?; - } - - assert_eq!(cashmap.len(), 1000); - - Ok(()) - } - - #[test] - fn test_hashmap_perf_insert_remove() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let total_items = 100; - - let mut cashmap = CashMap::attach(&mut storage); - - for i in 0..total_items { - cashmap.insert(&(i as i32).to_be_bytes(), i)?; - } - - for i in 0..total_items { - cashmap.remove(&(i as i32).to_be_bytes())?; - } - - assert_eq!(cashmap.len(), 0); - - Ok(()) - } - - #[test] - fn test_hashmap_paging() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let page_size = 50; - let total_items = 50; - let mut cashmap = CashMap::attach(&mut storage); - - for i in 0..total_items { - cashmap.insert(&(i as i32).to_be_bytes(), i)?; - } - - for i in 0..((total_items / page_size) - 1) { - let start_page = i; - - let values = cashmap.paging(start_page, page_size)?; - - for (index, value) in values.iter().enumerate() { - assert_eq!(value, &(page_size * start_page + index as u32)) - } - } - - Ok(()) - } - - #[test] - fn test_hashmap_paging_prefixed() -> StdResult<()> { - let mut storage = MockStorage::new(); - let mut prefixed = 
PrefixedStorage::new(&mut storage, b"test"); - let mut cashmap = CashMap::init(b"yo", &mut prefixed); - - let page_size = 50; - let total_items = 50; - //let mut cashmap = CashMap::attach(&mut storage); - - for i in 0..total_items { - cashmap.insert(&(i as i32).to_be_bytes(), i)?; - } - - for i in 0..((total_items / page_size) - 1) { - let start_page = i; - - let values = cashmap.paging(start_page, page_size)?; - - for (index, value) in values.iter().enumerate() { - assert_eq!(value, &(page_size * start_page + index as u32)) - } - } - - Ok(()) - } - - #[test] - fn test_hashmap_paging_overflow() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let page_size = 50; - let total_items = 10; - let mut cashmap = CashMap::attach(&mut storage); - - for i in 0..total_items { - cashmap.insert(&(i as i32).to_be_bytes(), i)?; - } - - let values = cashmap.paging(0, page_size)?; - - assert_eq!(values.len(), total_items as usize); - - for (index, value) in values.iter().enumerate() { - assert_eq!(value, &(index as u32)) - } - - Ok(()) - } - - #[test] - fn test_hashmap_insert_multiple() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut typed_store_mut = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string two".to_string(), - number: 1111, - }; - - typed_store_mut.insert(b"key1", foo1.clone())?; - typed_store_mut.insert(b"key2", foo2.clone())?; - - let read_foo1 = typed_store_mut.get(b"key1").unwrap(); - let read_foo2 = typed_store_mut.get(b"key2").unwrap(); - - assert_eq!(foo1, read_foo1); - assert_eq!(foo2, read_foo2); - Ok(()) - } - - #[test] - fn test_hashmap_insert_get() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut typed_store_mut = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - - typed_store_mut.insert(b"key1", foo1.clone())?; - let read_foo1 = 
typed_store_mut.get(b"key1").unwrap(); - assert_eq!(foo1, read_foo1); - - Ok(()) - } - - #[test] - fn test_hashmap_insert_contains() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut typed_store_mut = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - - typed_store_mut.insert(b"key1", foo1.clone())?; - let contains_k1 = typed_store_mut.contains(b"key1"); - - assert_eq!(contains_k1, true); - - Ok(()) - } - - #[test] - fn test_hashmap_insert_remove() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut typed_store_mut = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - - typed_store_mut.insert(b"key1", foo1.clone())?; - let before_remove_foo1 = typed_store_mut.get(b"key1"); - - assert!(before_remove_foo1.is_some()); - assert_eq!(foo1, before_remove_foo1.unwrap()); - - typed_store_mut.remove(b"key1")?; - - let result = typed_store_mut.get(b"key1"); - assert!(result.is_none()); - - Ok(()) - } - - #[test] - fn test_hashmap_iter() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut hashmap = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string two".to_string(), - number: 1111, - }; - - hashmap.insert(b"key1", foo1.clone())?; - hashmap.insert(b"key2", foo2.clone())?; - - let mut x = hashmap.as_readonly().iter(); - let (len, _) = x.size_hint(); - assert_eq!(len, 2); - - assert_eq!(x.next().unwrap(), foo1); - - assert_eq!(x.next().unwrap(), foo2); - - Ok(()) - } - - #[test] - fn test_hashmap_overwrite() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut hashmap = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string two".to_string(), - number: 2222, - }; - - hashmap.insert(b"key1", foo1.clone())?; - 
hashmap.insert(b"key1", foo2.clone())?; - - let foo3 = hashmap.get(b"key1").unwrap(); - - assert_eq!(foo3, foo2); - - Ok(()) - } - - #[test] - fn test_hashmap_overwrite_prefixed() -> StdResult<()> { - let mut storage = MockStorage::new(); - let mut prefixed = PrefixedStorage::new(&mut storage, b"test"); - let mut hashmap = CashMap::init(b"yo", &mut prefixed); - - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string two".to_string(), - number: 2222, - }; - - hashmap.insert(b"key1", foo1.clone())?; - hashmap.insert(b"key1", foo2.clone())?; - - let foo3 = hashmap.get(b"key1").unwrap(); - - assert_eq!(hashmap.len(), 1); - assert_eq!(foo3, foo2); - - Ok(()) - } - - #[test] - fn test_cashmap_basics() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut typed_store_mut = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string one".to_string(), - number: 1111, - }; - typed_store_mut.insert(b"key1", foo1.clone())?; - typed_store_mut.insert(b"key2", foo2.clone())?; - - let read_foo1 = typed_store_mut.get(b"key1").unwrap(); - let read_foo2 = typed_store_mut.get(b"key2").unwrap(); - - assert_eq!(foo1, read_foo1); - assert_eq!(foo2, read_foo2); - - // show that it loads foo1 before removal - let before_remove_foo1 = typed_store_mut.get(b"key1"); - assert!(before_remove_foo1.is_some()); - assert_eq!(foo1, before_remove_foo1.unwrap()); - // and returns None after removal - typed_store_mut.remove(b"key1")?; - let removed_foo1 = typed_store_mut.get(b"key1"); - assert!(removed_foo1.is_none()); - - // show what happens when reading from keys that have not been set yet. 
- assert!(typed_store_mut.get(b"key3").is_none()); - - // Try to load it with the wrong format - let typed_store = - ReadOnlyCashMap::::attach_with_serialization(&storage, Json, None); - match typed_store.load(b"key2") { - Err(StdError::ParseErr { - target_type, msg, .. - }) if target_type == "secret_toolkit_incubator::cashmap::InternalItem" - && msg == "Invalid type" => {} - other => panic!("unexpected value: {:?}", other), - } - - Ok(()) - } - - #[test] - fn test_cashmap_basics_prefixed() -> StdResult<()> { - let mut storage = MockStorage::new(); - //let mut prefixed = PrefixedStorage::new(b"test", &mut storage); - let mut cmap = CashMap::init(b"yo", &mut storage); - - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string one".to_string(), - number: 1111, - }; - cmap.insert(b"key1", foo1.clone())?; - cmap.insert(b"key2", foo2.clone())?; - - let read_foo1 = cmap.get(b"key1").unwrap(); - let read_foo2 = cmap.get(b"key2").unwrap(); - - assert_eq!(foo1, read_foo1); - assert_eq!(foo2, read_foo2); - - // show that it loads foo1 before removal - let before_remove_foo1 = cmap.get(b"key1"); - assert!(before_remove_foo1.is_some()); - assert_eq!(foo1, before_remove_foo1.unwrap()); - // and returns None after removal - cmap.remove(b"key1")?; - let removed_foo1 = cmap.get(b"key1"); - assert!(removed_foo1.is_none()); - - // show what happens when reading from keys that have not been set yet. - assert!(cmap.get(b"key3").is_none()); - - // Try to load it with the wrong format - let typed_store = ReadOnlyCashMap::::attach_with_serialization( - &storage, - Json, - Some(b"yo".to_vec()), - ); - match typed_store.load(b"key2") { - Err(StdError::ParseErr { - target_type, msg, .. 
- }) if target_type == "secret_toolkit_incubator::cashmap::InternalItem" - && msg == "Invalid type" => {} - other => panic!("unexpected value: {:?}", other), - } - - Ok(()) - } - - #[test] - fn test_cashmap_length() -> StdResult<()> { - let mut storage = MockStorage::new(); - - let mut cmap = CashMap::attach(&mut storage); - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string one".to_string(), - number: 1111, - }; - - assert_eq!(cmap.len(), 0); - - cmap.insert(b"k1", foo1.clone())?; - assert_eq!(cmap.len(), 1); - - // add another item - cmap.insert(b"k2", foo2.clone())?; - assert_eq!(cmap.len(), 2); - - // remove item and check length - cmap.remove(b"k1")?; - assert_eq!(cmap.len(), 1); - - // override item (should not change length) - cmap.insert(b"k2", foo1)?; - assert_eq!(cmap.len(), 1); - - // remove item and check length - cmap.remove(b"k2")?; - assert_eq!(cmap.len(), 0); - - Ok(()) - } - - #[test] - fn test_cashmap_length_prefixed() -> StdResult<()> { - let mut storage = MockStorage::new(); - let mut prefixed = PrefixedStorage::new(&mut storage, b"test"); - let mut cmap = CashMap::init(b"yo", &mut prefixed); - - let foo1 = Foo { - string: "string one".to_string(), - number: 1111, - }; - let foo2 = Foo { - string: "string one".to_string(), - number: 1111, - }; - - assert_eq!(cmap.len(), 0); - - cmap.insert(b"k1", foo1.clone())?; - assert_eq!(cmap.len(), 1); - - // add another item - cmap.insert(b"k2", foo2.clone())?; - assert_eq!(cmap.len(), 2); - - // remove item and check length - cmap.remove(b"k1")?; - assert_eq!(cmap.len(), 1); - - // override item (should not change length) - cmap.insert(b"k2", foo1)?; - assert_eq!(cmap.len(), 1); - - // remove item and check length - cmap.remove(b"k2")?; - assert_eq!(cmap.len(), 0); - - Ok(()) - } -} diff --git a/packages/incubator/src/lib.rs b/packages/incubator/src/lib.rs index 806637d..a5ad360 100644 --- a/packages/incubator/src/lib.rs +++ 
b/packages/incubator/src/lib.rs @@ -1,8 +1,3 @@ -#[cfg(feature = "cashmap")] -pub mod cashmap; -#[cfg(feature = "cashmap")] -pub use cashmap::{CashMap, ReadOnlyCashMap}; - #[cfg(feature = "generational-store")] pub mod generational_store; #[cfg(feature = "generational-store")] diff --git a/packages/storage/Cargo.toml b/packages/storage/Cargo.toml index 0461873..50c64d4 100644 --- a/packages/storage/Cargo.toml +++ b/packages/storage/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "secret-toolkit-storage" -version = "0.3.0" +version = "0.4.1" edition = "2018" authors = ["SCRT Labs "] license-file = "../../LICENSE" diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs index 5a2a3bc..ce33387 100644 --- a/packages/storage/src/keymap.rs +++ b/packages/storage/src/keymap.rs @@ -25,7 +25,7 @@ where T: Serialize + DeserializeOwned, Ser: Serde, { - item: Vec, + item_vec: Vec, index_pos: u32, item_type: PhantomData, serialization_type: PhantomData, @@ -34,14 +34,14 @@ where impl InternalItem { fn new(index_pos: u32, item: &T) -> StdResult { Ok(Self { - item: Ser::serialize(item)?, + item_vec: Ser::serialize(item)?, index_pos, item_type: PhantomData, serialization_type: PhantomData, }) } fn get_item(&self) -> StdResult { - Ser::deserialize(&self.item) + Ser::deserialize(&self.item_vec) } } diff --git a/packages/toolkit/Cargo.toml b/packages/toolkit/Cargo.toml index 54f00ad..837cefe 100644 --- a/packages/toolkit/Cargo.toml +++ b/packages/toolkit/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "secret-toolkit" -version = "0.3.0" +version = "0.4.0" edition = "2018" authors = ["SCRT Labs "] license-file = "../../LICENSE" @@ -28,11 +28,11 @@ viewing-key = ["secret-toolkit-viewing-key"] [dependencies] secret-toolkit-crypto = { version = "0.3.0", path = "../crypto", optional = true } -secret-toolkit-incubator = { version = "0.3.0", path = "../incubator", optional = true } +secret-toolkit-incubator = { version = "0.4.0", path = "../incubator", optional = true } 
secret-toolkit-permit = { version = "0.3.0", path = "../permit", optional = true } secret-toolkit-serialization = { version = "0.3", path = "../serialization", optional = true } secret-toolkit-snip20 = { version = "0.3", path = "../snip20", optional = true } secret-toolkit-snip721 = { version = "0.3", path = "../snip721", optional = true } -secret-toolkit-storage = { version = "0.3", path = "../storage", optional = true } +secret-toolkit-storage = { version = "0.4.1", path = "../storage", optional = true } secret-toolkit-utils = { version = "0.3", path = "../utils", optional = true } secret-toolkit-viewing-key = { version = "0.3", path = "../viewing_key", optional = true } From 7325f5b381c9751fc88a961b4d4a0c6666cffa2c Mon Sep 17 00:00:00 2001 From: srdtrk Date: Mon, 22 Aug 2022 12:23:48 +0300 Subject: [PATCH 06/12] changing the storage version in the toolkit package since the default behaviour is to import the highest compatible version --- packages/toolkit/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/toolkit/Cargo.toml b/packages/toolkit/Cargo.toml index 837cefe..ae51848 100644 --- a/packages/toolkit/Cargo.toml +++ b/packages/toolkit/Cargo.toml @@ -33,6 +33,6 @@ secret-toolkit-permit = { version = "0.3.0", path = "../permit", optional = true secret-toolkit-serialization = { version = "0.3", path = "../serialization", optional = true } secret-toolkit-snip20 = { version = "0.3", path = "../snip20", optional = true } secret-toolkit-snip721 = { version = "0.3", path = "../snip721", optional = true } -secret-toolkit-storage = { version = "0.4.1", path = "../storage", optional = true } +secret-toolkit-storage = { version = "0.4", path = "../storage", optional = true } secret-toolkit-utils = { version = "0.3", path = "../utils", optional = true } secret-toolkit-viewing-key = { version = "0.3", path = "../viewing_key", optional = true } From c86c906cfad2123c9a1b4a785f419c6741c1548a Mon Sep 17 00:00:00 2001 From: srdtrk Date: Thu, 
25 Aug 2022 12:53:31 +0300 Subject: [PATCH 07/12] minor fix to readme --- packages/storage/Readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/storage/Readme.md b/packages/storage/Readme.md index 3d784cd..aea5af9 100644 --- a/packages/storage/Readme.md +++ b/packages/storage/Readme.md @@ -178,7 +178,7 @@ use secret_toolkit::storage::{Keymap} ```ignore pub static ADDR_VOTE: Keymap = Keymap::new(b"vote"); -pub static BET_STORE: Keymap = Keymap::new(b"vote"); +pub static BET_STORE: Keymap = Keymap::new(b"bet"); ``` > ❗ Initializing the object as const instead of static will also work but be less efficient since the variable won't be able to cache length data. From 54151e48f9475e0aea1df2c234328b9664d063d1 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Sat, 27 Aug 2022 20:36:04 +0300 Subject: [PATCH 08/12] fixed an error that would prevent the storage from working in a contract --- packages/storage/src/append_store.rs | 49 ++++++------- packages/storage/src/deque_store.rs | 70 ++++++++---------- packages/storage/src/item.rs | 20 +++--- packages/storage/src/keymap.rs | 104 +++++++++++++-------------- 4 files changed, 115 insertions(+), 128 deletions(-) diff --git a/packages/storage/src/append_store.rs b/packages/storage/src/append_store.rs index a4a7ef0..45f138f 100644 --- a/packages/storage/src/append_store.rs +++ b/packages/storage/src/append_store.rs @@ -62,7 +62,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { /// gets the length from storage, and otherwise sets it to 0 - pub fn get_len(&self, storage: &S) -> StdResult { + pub fn get_len(&self, storage: &dyn Storage) -> StdResult { let mut may_len = self.length.lock().unwrap(); match *may_len { Some(len) => Ok(len), @@ -84,11 +84,11 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { } } /// checks if the collection has any elements - pub fn 
is_empty(&self, storage: &S) -> StdResult { + pub fn is_empty(&self, storage: &dyn Storage) -> StdResult { Ok(self.get_len(storage)? == 0) } /// gets the element at pos if within bounds - pub fn get_at(&self, storage: &S, pos: u32) -> StdResult { + pub fn get_at(&self, storage: &dyn Storage, pos: u32) -> StdResult { let len = self.get_len(storage)?; if pos > len { return Err(StdError::generic_err("AppendStore access out of bounds")); @@ -96,13 +96,13 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { self.get_at_unchecked(storage, pos) } /// tries to get the element at pos - fn get_at_unchecked(&self, storage: &S, pos: u32) -> StdResult { + fn get_at_unchecked(&self, storage: &dyn Storage, pos: u32) -> StdResult { let key = pos.to_be_bytes(); self.load_impl(storage, &key) } /// Set the length of the collection - fn set_len(&self, storage: &mut S, len: u32) { + fn set_len(&self, storage: &mut dyn Storage, len: u32) { let len_key = [self.as_slice(), LEN_KEY].concat(); storage.set(&len_key, &len.to_be_bytes()); @@ -110,11 +110,11 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { *may_len = Some(len); } /// Clear the collection - pub fn clear(&self, storage: &mut S) { + pub fn clear(&self, storage: &mut dyn Storage) { self.set_len(storage, 0); } /// Replaces data at a position within bounds - pub fn set_at(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { + pub fn set_at(&self, storage: &mut dyn Storage, pos: u32, item: &T) -> StdResult<()> { let len = self.get_len(storage)?; if pos >= len { return Err(StdError::generic_err("AppendStore access out of bounds")); @@ -122,18 +122,18 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { self.set_at_unchecked(storage, pos, item) } /// Sets data at a given index - fn set_at_unchecked(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { + fn set_at_unchecked(&self, storage: &mut dyn Storage, pos: u32, item: &T) 
-> StdResult<()> { self.save_impl(storage, &pos.to_be_bytes(), item) } /// Pushes an item to AppendStorage - pub fn push(&self, storage: &mut S, item: &T) -> StdResult<()> { + pub fn push(&self, storage: &mut dyn Storage, item: &T) -> StdResult<()> { let len = self.get_len(storage)?; self.set_at_unchecked(storage, len, item)?; self.set_len(storage, len + 1); Ok(()) } /// Pops an item from AppendStore - pub fn pop(&self, storage: &mut S) -> StdResult { + pub fn pop(&self, storage: &mut dyn Storage) -> StdResult { if let Some(len) = self.get_len(storage)?.checked_sub(1) { let item = self.get_at_unchecked(storage, len); self.set_len(storage, len); @@ -150,7 +150,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { /// /// Removing an element from the start (head) of the collection /// has the worst runtime and gas cost. - pub fn remove(&self, storage: &mut S, pos: u32) -> StdResult { + pub fn remove(&self, storage: &mut dyn Storage, pos: u32) -> StdResult { let len = self.get_len(storage)?; if pos >= len { @@ -166,13 +166,13 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { item } /// Returns a readonly iterator - pub fn iter(&self, storage: &'a S) -> StdResult> { + pub fn iter(&self, storage: &'a dyn Storage) -> StdResult> { let len = self.get_len(storage)?; let iter = AppendStoreIter::new(self, storage, 0, len); Ok(iter) } /// does paging with the given parameters - pub fn paging(&self, storage: &S, start_page: u32, size: u32) -> StdResult> { + pub fn paging(&self, storage: &dyn Storage, start_page: u32, size: u32) -> StdResult> { self.iter(storage)? 
.skip((start_page as usize) * (size as usize)) .take(size as usize) @@ -208,7 +208,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { /// /// * `storage` - a reference to the storage this item is in /// * `key` - a byte slice representing the key to access the stored item - fn load_impl(&self, storage: &S, key: &[u8]) -> StdResult { + fn load_impl(&self, storage: &dyn Storage, key: &[u8]) -> StdResult { let prefixed_key = [self.as_slice(), key].concat(); Ser::deserialize( &storage @@ -224,7 +224,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { /// * `storage` - a mutable reference to the storage this item should go to /// * `key` - a byte slice representing the key to access the stored item /// * `value` - a reference to the item to store - fn save_impl(&self, storage: &mut S, key: &[u8], value: &T) -> StdResult<()> { + fn save_impl(&self, storage: &mut dyn Storage, key: &[u8], value: &T) -> StdResult<()> { let prefixed_key = [self.as_slice(), key].concat(); storage.set(&prefixed_key, &Ser::serialize(value)?); Ok(()) @@ -232,28 +232,26 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { } /// An iterator over the contents of the append store. 
-pub struct AppendStoreIter<'a, T, S, Ser> +pub struct AppendStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { append_store: &'a AppendStore<'a, T, Ser>, - storage: &'a S, + storage: &'a dyn Storage, start: u32, end: u32, } -impl<'a, T, S, Ser> AppendStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> AppendStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { /// constructor pub fn new( append_store: &'a AppendStore<'a, T, Ser>, - storage: &'a S, + storage: &'a dyn Storage, start: u32, end: u32, ) -> Self { @@ -266,10 +264,9 @@ where } } -impl<'a, T, S, Ser> Iterator for AppendStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> Iterator for AppendStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { type Item = StdResult; @@ -301,10 +298,9 @@ where } } -impl<'a, T, S, Ser> DoubleEndedIterator for AppendStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> DoubleEndedIterator for AppendStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { fn next_back(&mut self) -> Option { @@ -329,10 +325,9 @@ where } // This enables writing `append_store.iter().skip(n).rev()` -impl<'a, T, S, Ser> ExactSizeIterator for AppendStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> ExactSizeIterator for AppendStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { } diff --git a/packages/storage/src/deque_store.rs b/packages/storage/src/deque_store.rs index cb7ea48..812b433 100644 --- a/packages/storage/src/deque_store.rs +++ b/packages/storage/src/deque_store.rs @@ -67,7 +67,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { /// gets the length from storage, and otherwise sets it to 0 - pub fn get_len(&self, storage: &S) -> StdResult { + pub fn get_len(&self, storage: &dyn Storage) -> StdResult { let mut may_len = 
self.length.lock().unwrap(); match *may_len { Some(len) => Ok(len), @@ -81,7 +81,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { } } /// gets the offset from storage, and otherwise sets it to 0 - pub fn get_off(&self, storage: &S) -> StdResult { + pub fn get_off(&self, storage: &dyn Storage) -> StdResult { let mut may_off = self.offset.lock().unwrap(); match *may_off { Some(len) => Ok(len), @@ -95,7 +95,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { } } /// gets offset or length - fn _get_u32(&self, storage: &S, key: &[u8]) -> StdResult { + fn _get_u32(&self, storage: &dyn Storage, key: &[u8]) -> StdResult { let num_key = [self.as_slice(), key].concat(); if let Some(num_vec) = storage.get(&num_key) { let num_bytes = num_vec @@ -109,11 +109,11 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { } } /// checks if the collection has any elements - pub fn is_empty(&self, storage: &S) -> StdResult { + pub fn is_empty(&self, storage: &dyn Storage) -> StdResult { Ok(self.get_len(storage)? 
== 0) } /// gets the element at pos if within bounds - pub fn get_at(&self, storage: &S, pos: u32) -> StdResult { + pub fn get_at(&self, storage: &dyn Storage, pos: u32) -> StdResult { let len = self.get_len(storage)?; if pos >= len { return Err(StdError::generic_err("DequeStore access out of bounds")); @@ -121,38 +121,38 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { self.get_at_unchecked(storage, pos) } /// tries to get the element at pos - fn get_at_unchecked(&self, storage: &S, pos: u32) -> StdResult { + fn get_at_unchecked(&self, storage: &dyn Storage, pos: u32) -> StdResult { self.load_impl(storage, &self._get_offset_pos(storage, pos)?.to_be_bytes()) } /// add the offset to the pos - fn _get_offset_pos(&self, storage: &S, pos: u32) -> StdResult { + fn _get_offset_pos(&self, storage: &dyn Storage, pos: u32) -> StdResult { let off = self.get_off(storage)?; Ok(pos.overflowing_add(off).0) } /// Set the length of the collection - fn set_len(&self, storage: &mut S, len: u32) { + fn set_len(&self, storage: &mut dyn Storage, len: u32) { let mut may_len = self.length.lock().unwrap(); *may_len = Some(len); self._set_u32(storage, LEN_KEY, len) } /// Set the offset of the collection - fn set_off(&self, storage: &mut S, off: u32) { + fn set_off(&self, storage: &mut dyn Storage, off: u32) { let mut may_off = self.offset.lock().unwrap(); *may_off = Some(off); self._set_u32(storage, OFFSET_KEY, off) } /// Set the length or offset of the collection - fn _set_u32(&self, storage: &mut S, key: &[u8], num: u32) { + fn _set_u32(&self, storage: &mut dyn Storage, key: &[u8], num: u32) { let num_key = [self.as_slice(), key].concat(); storage.set(&num_key, &num.to_be_bytes()); } /// Clear the collection - pub fn clear(&self, storage: &mut S) { + pub fn clear(&self, storage: &mut dyn Storage) { self.set_len(storage, 0); self.set_off(storage, 0); } /// Replaces data at a position within bounds - pub fn set_at(&self, storage: &mut S, pos: u32, item: &T) 
-> StdResult<()> { + pub fn set_at(&self, storage: &mut dyn Storage, pos: u32, item: &T) -> StdResult<()> { let len = self.get_len(storage)?; if pos >= len { return Err(StdError::generic_err("DequeStore access out of bounds")); @@ -160,22 +160,19 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { self.set_at_unchecked(storage, pos, item) } /// Sets data at a given index - fn set_at_unchecked(&self, storage: &mut S, pos: u32, item: &T) -> StdResult<()> { - self.save_impl( - storage, - &self._get_offset_pos(storage, pos)?.to_be_bytes(), - item, - ) + fn set_at_unchecked(&self, storage: &mut dyn Storage, pos: u32, item: &T) -> StdResult<()> { + let get_offset_pos = self._get_offset_pos(storage, pos)?; + self.save_impl(storage, &get_offset_pos.to_be_bytes(), item) } /// Pushes an item to the back - pub fn push_back(&self, storage: &mut S, item: &T) -> StdResult<()> { + pub fn push_back(&self, storage: &mut dyn Storage, item: &T) -> StdResult<()> { let len = self.get_len(storage)?; self.set_at_unchecked(storage, len, item)?; self.set_len(storage, len + 1); Ok(()) } /// Pushes an item to the front - pub fn push_front(&self, storage: &mut S, item: &T) -> StdResult<()> { + pub fn push_front(&self, storage: &mut dyn Storage, item: &T) -> StdResult<()> { let off = self.get_off(storage)?; let len = self.get_len(storage)?; self.set_off(storage, off.overflowing_sub(1).0); @@ -184,7 +181,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { Ok(()) } /// Pops an item from the back - pub fn pop_back(&self, storage: &mut S) -> StdResult { + pub fn pop_back(&self, storage: &mut dyn Storage) -> StdResult { if let Some(len) = self.get_len(storage)?.checked_sub(1) { let item = self.get_at_unchecked(storage, len); self.set_len(storage, len); @@ -194,7 +191,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { } } /// Pops an item from the front - pub fn pop_front(&self, storage: &mut S) -> 
StdResult { + pub fn pop_front(&self, storage: &mut dyn Storage) -> StdResult { if let Some(len) = self.get_len(storage)?.checked_sub(1) { let off = self.get_off(storage)?; let item = self.get_at_unchecked(storage, 0); @@ -214,7 +211,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { /// /// Removing an element from the middle of the collection /// has the worst runtime and gas cost. - pub fn remove(&self, storage: &mut S, pos: u32) -> StdResult { + pub fn remove(&self, storage: &mut dyn Storage, pos: u32) -> StdResult { let off = self.get_off(storage)?; let len = self.get_len(storage)?; if pos >= len { @@ -240,13 +237,13 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { item } /// Returns a readonly iterator - pub fn iter(&self, storage: &'a S) -> StdResult> { + pub fn iter(&self, storage: &'a dyn Storage) -> StdResult> { let len = self.get_len(storage)?; let iter = DequeStoreIter::new(self, storage, 0, len); Ok(iter) } /// does paging with the given parameters - pub fn paging(&self, storage: &S, start_page: u32, size: u32) -> StdResult> { + pub fn paging(&self, storage: &dyn Storage, start_page: u32, size: u32) -> StdResult> { self.iter(storage)? 
.skip((start_page as usize) * (size as usize)) .take(size as usize) @@ -270,7 +267,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { /// /// * `storage` - a reference to the storage this item is in /// * `key` - a byte slice representing the key to access the stored item - fn load_impl(&self, storage: &S, key: &[u8]) -> StdResult { + fn load_impl(&self, storage: &dyn Storage, key: &[u8]) -> StdResult { let prefixed_key = [self.as_slice(), key].concat(); Ser::deserialize( &storage @@ -286,7 +283,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { /// * `storage` - a mutable reference to the storage this item should go to /// * `key` - a byte slice representing the key to access the stored item /// * `value` - a reference to the item to store - fn save_impl(&self, storage: &mut S, key: &[u8], value: &T) -> StdResult<()> { + fn save_impl(&self, storage: &mut dyn Storage, key: &[u8], value: &T) -> StdResult<()> { let prefixed_key = [self.as_slice(), key].concat(); storage.set(&prefixed_key, &Ser::serialize(value)?); Ok(()) @@ -307,28 +304,26 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for DequeStore<'a, T } /// An iterator over the contents of the deque store. 
-pub struct DequeStoreIter<'a, T, S, Ser> +pub struct DequeStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { deque_store: &'a DequeStore<'a, T, Ser>, - storage: &'a S, + storage: &'a dyn Storage, start: u32, end: u32, } -impl<'a, T, S, Ser> DequeStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> DequeStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { /// constructor pub fn new( deque_store: &'a DequeStore<'a, T, Ser>, - storage: &'a S, + storage: &'a dyn Storage, start: u32, end: u32, ) -> Self { @@ -341,10 +336,9 @@ where } } -impl<'a, T, S, Ser> Iterator for DequeStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> Iterator for DequeStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { type Item = StdResult; @@ -376,10 +370,9 @@ where } } -impl<'a, T, S, Ser> DoubleEndedIterator for DequeStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> DoubleEndedIterator for DequeStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { fn next_back(&mut self) -> Option { @@ -404,10 +397,9 @@ where } // This enables writing `deque_store.iter().skip(n).rev()` -impl<'a, T, S, Ser> ExactSizeIterator for DequeStoreIter<'a, T, S, Ser> +impl<'a, T, Ser> ExactSizeIterator for DequeStoreIter<'a, T, Ser> where T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { } diff --git a/packages/storage/src/item.rs b/packages/storage/src/item.rs index 94a93fc..3a3e03d 100644 --- a/packages/storage/src/item.rs +++ b/packages/storage/src/item.rs @@ -33,28 +33,28 @@ where Ser: Serde, { /// save will serialize the model and store, returns an error on serialization issues - pub fn save(&self, storage: &mut S, data: &T) -> StdResult<()> { + pub fn save(&self, storage: &mut dyn Storage, data: &T) -> StdResult<()> { self.save_impl(storage, data) } /// userfacing remove function - pub fn remove(&self, storage: &mut S) { + pub fn remove(&self, storage: &mut dyn Storage) { 
self.remove_impl(storage); } /// load will return an error if no data is set at the given key, or on parse error - pub fn load(&self, storage: &S) -> StdResult { + pub fn load(&self, storage: &dyn Storage) -> StdResult { self.load_impl(storage) } /// may_load will parse the data stored at the key if present, returns `Ok(None)` if no data there. /// returns an error on issues parsing - pub fn may_load(&self, storage: &S) -> StdResult> { + pub fn may_load(&self, storage: &dyn Storage) -> StdResult> { self.may_load_impl(storage) } /// efficient way to see if any object is currently saved. - pub fn is_empty(&self, storage: &S) -> bool { + pub fn is_empty(&self, storage: &dyn Storage) -> bool { storage.get(self.as_slice()).is_none() } @@ -63,7 +63,7 @@ where /// /// It assumes, that data was initialized before, and if it doesn't exist, `Err(StdError::NotFound)` /// is returned. - pub fn update(&self, storage: &mut S, action: A) -> StdResult + pub fn update(&self, storage: &mut dyn Storage, action: A) -> StdResult where S: Storage, A: FnOnce(T) -> StdResult, @@ -80,7 +80,7 @@ where /// # Arguments /// /// * `storage` - a reference to the storage this item is in - fn load_impl(&self, storage: &S) -> StdResult { + fn load_impl(&self, storage: &dyn Storage) -> StdResult { Ser::deserialize( &storage .get(self.as_slice()) @@ -94,7 +94,7 @@ where /// # Arguments /// /// * `storage` - a reference to the storage this item is in - fn may_load_impl(&self, storage: &S) -> StdResult> { + fn may_load_impl(&self, storage: &dyn Storage) -> StdResult> { match storage.get(self.as_slice()) { Some(value) => Ser::deserialize(&value).map(Some), None => Ok(None), @@ -107,7 +107,7 @@ where /// /// * `storage` - a mutable reference to the storage this item should go to /// * `value` - a reference to the item to store - fn save_impl(&self, storage: &mut S, value: &T) -> StdResult<()> { + fn save_impl(&self, storage: &mut dyn Storage, value: &T) -> StdResult<()> { storage.set(self.as_slice(), 
&Ser::serialize(value)?); Ok(()) } @@ -117,7 +117,7 @@ where /// # Arguments /// /// * `storage` - a mutable reference to the storage this item is in - fn remove_impl(&self, storage: &mut S) { + fn remove_impl(&self, storage: &mut dyn Storage) { storage.remove(self.as_slice()); } diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs index ce33387..9fb1f6e 100644 --- a/packages/storage/src/keymap.rs +++ b/packages/storage/src/keymap.rs @@ -107,7 +107,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ser::deserialize(key_data) } /// get total number of objects saved - pub fn get_len(&self, storage: &S) -> StdResult { + pub fn get_len(&self, storage: &dyn Storage) -> StdResult { let mut may_len = self.length.lock().unwrap(); match *may_len { Some(length) => Ok(length), @@ -129,11 +129,11 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } } /// checks if the collection has any elements - pub fn is_empty(&self, storage: &S) -> StdResult { + pub fn is_empty(&self, storage: &dyn Storage) -> StdResult { Ok(self.get_len(storage)? 
== 0) } /// set length of the map - fn set_len(&self, storage: &mut S, len: u32) -> StdResult<()> { + fn set_len(&self, storage: &mut dyn Storage, len: u32) -> StdResult<()> { let len_key = [self.as_slice(), MAP_LENGTH].concat(); storage.set(&len_key, &len.to_be_bytes()); @@ -143,7 +143,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ok(()) } /// Used to get the indexes stored in the given page number - fn _get_indexes(&self, storage: &S, page: u32) -> StdResult>> { + fn _get_indexes(&self, storage: &dyn Storage, page: u32) -> StdResult>> { let indexes_key = [self.as_slice(), INDEXES, page.to_be_bytes().as_slice()].concat(); let maybe_serialized = storage.get(&indexes_key); match maybe_serialized { @@ -152,9 +152,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } } /// Set an indexes page - fn _set_indexes_page( + fn _set_indexes_page( &self, - storage: &mut S, + storage: &mut dyn Storage, page: u32, indexes: &Vec>, ) -> StdResult<()> { @@ -163,7 +163,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ok(()) } /// user facing get function - pub fn get(&self, storage: &S, key: &K) -> Option { + pub fn get(&self, storage: &dyn Storage, key: &K) -> Option { if let Ok(internal_item) = self._get_from_key(storage, key) { internal_item.get_item().ok() } else { @@ -171,12 +171,12 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } } /// internal item get function - fn _get_from_key(&self, storage: &S, key: &K) -> StdResult> { + fn _get_from_key(&self, storage: &dyn Storage, key: &K) -> StdResult> { let key_vec = self.serialize_key(key)?; self.load_impl(storage, &key_vec) } /// user facing remove function - pub fn remove(&self, storage: &mut S, key: &K) -> StdResult<()> { + pub fn remove(&self, storage: &mut dyn Storage, key: &K) -> StdResult<()> { let key_vec = self.serialize_key(key)?; let removed_pos = self._get_from_key(storage, 
key)?.index_pos; @@ -236,7 +236,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ok(()) } /// user facing insert function - pub fn insert(&self, storage: &mut S, key: &K, item: &T) -> StdResult<()> { + pub fn insert(&self, storage: &mut dyn Storage, key: &K, item: &T) -> StdResult<()> { let key_vec = self.serialize_key(key)?; match self.may_load_impl(storage, &key_vec)? { Some(existing_internal_item) => { @@ -260,16 +260,16 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } } /// user facing method that checks if any item is stored with this key. - pub fn contains(&self, storage: &S, key: &K) -> bool { + pub fn contains(&self, storage: &dyn Storage, key: &K) -> bool { match self.serialize_key(key) { Ok(key_vec) => self.contains_impl(storage, &key_vec), Err(_) => false, } } /// paginates (key, item) pairs. - pub fn paging( + pub fn paging( &self, - storage: &S, + storage: &dyn Storage, start_page: u32, size: u32, ) -> StdResult> { @@ -292,9 +292,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: self.get_pairs_at_positions(storage, start_pos, end_pos) } /// paginates only the keys. 
More efficient than paginating both items and keys - pub fn paging_keys( + pub fn paging_keys( &self, - storage: &S, + storage: &dyn Storage, start_page: u32, size: u32, ) -> StdResult> { @@ -317,9 +317,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: self.get_keys_at_positions(storage, start_pos, end_pos) } /// tries to list keys without checking start/end bounds - fn get_keys_at_positions( + fn get_keys_at_positions( &self, - storage: &S, + storage: &dyn Storage, start: u32, end: u32, ) -> StdResult> { @@ -349,9 +349,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ok(res) } /// tries to list (key, item) pairs without checking start/end bounds - fn get_pairs_at_positions( + fn get_pairs_at_positions( &self, - storage: &S, + storage: &dyn Storage, start: u32, end: u32, ) -> StdResult> { @@ -382,7 +382,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ok(res) } /// gets a key from a specific position in indexes - fn get_key_from_pos(&self, storage: &S, pos: u32) -> StdResult { + fn get_key_from_pos(&self, storage: &dyn Storage, pos: u32) -> StdResult { let page = _page_from_position(pos); let indexes = self._get_indexes(storage, page)?; let index = pos % PAGE_SIZE; @@ -390,7 +390,7 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: self.deserialize_key(key_vec) } /// gets a key from a specific position in indexes - fn get_pair_from_pos(&self, storage: &S, pos: u32) -> StdResult<(K, T)> { + fn get_pair_from_pos(&self, storage: &dyn Storage, pos: u32) -> StdResult<(K, T)> { let page = _page_from_position(pos); let indexes = self._get_indexes(storage, page)?; let index = pos % PAGE_SIZE; @@ -400,13 +400,13 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: Ok((key, item)) } /// Returns a readonly iterator only for keys. More efficient than iter(). 
- pub fn iter_keys(&self, storage: &'a S) -> StdResult> { + pub fn iter_keys(&self, storage: &'a dyn Storage) -> StdResult> { let len = self.get_len(storage)?; let iter = KeyIter::new(self, storage, 0, len); Ok(iter) } /// Returns a readonly iterator for (key-item) pairs - pub fn iter(&self, storage: &'a S) -> StdResult> { + pub fn iter(&self, storage: &'a dyn Storage) -> StdResult> { let len = self.get_len(storage)?; let iter = KeyItemIter::new(self, storage, 0, len); Ok(iter) @@ -441,15 +441,14 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: } /// An iterator over the keys of the Keymap. -pub struct KeyIter<'a, K, T, S, Ser> +pub struct KeyIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { keymap: &'a Keymap<'a, K, T, Ser>, - storage: &'a S, + storage: &'a dyn Storage, start: u32, end: u32, saved_indexes: Option>>, @@ -458,15 +457,19 @@ where saved_back_index_page: Option, } -impl<'a, K, T, S, Ser> KeyIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> KeyIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { /// constructor - pub fn new(keymap: &'a Keymap<'a, K, T, Ser>, storage: &'a S, start: u32, end: u32) -> Self { + pub fn new( + keymap: &'a Keymap<'a, K, T, Ser>, + storage: &'a dyn Storage, + start: u32, + end: u32, + ) -> Self { Self { keymap, storage, @@ -480,11 +483,10 @@ where } } -impl<'a, K, T, S, Ser> Iterator for KeyIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> Iterator for KeyIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { type Item = StdResult; @@ -606,11 +608,10 @@ where } } -impl<'a, K, T, S, Ser> DoubleEndedIterator for KeyIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> DoubleEndedIterator for KeyIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { fn 
next_back(&mut self) -> Option { @@ -726,11 +727,10 @@ where } // This enables writing `append_store.iter().skip(n).rev()` -impl<'a, K, T, S, Ser> ExactSizeIterator for KeyIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> ExactSizeIterator for KeyIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { } @@ -738,15 +738,14 @@ where // =============================================================================================== /// An iterator over the (key, item) pairs of the Keymap. Less efficient than just iterating over keys. -pub struct KeyItemIter<'a, K, T, S, Ser> +pub struct KeyItemIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { keymap: &'a Keymap<'a, K, T, Ser>, - storage: &'a S, + storage: &'a dyn Storage, start: u32, end: u32, saved_indexes: Option>>, @@ -755,15 +754,19 @@ where saved_back_index_page: Option, } -impl<'a, K, T, S, Ser> KeyItemIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> KeyItemIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { /// constructor - pub fn new(keymap: &'a Keymap<'a, K, T, Ser>, storage: &'a S, start: u32, end: u32) -> Self { + pub fn new( + keymap: &'a Keymap<'a, K, T, Ser>, + storage: &'a dyn Storage, + start: u32, + end: u32, + ) -> Self { Self { keymap, storage, @@ -777,11 +780,10 @@ where } } -impl<'a, K, T, S, Ser> Iterator for KeyItemIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> Iterator for KeyItemIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { type Item = StdResult<(K, T)>; @@ -906,11 +908,10 @@ where } } -impl<'a, K, T, S, Ser> DoubleEndedIterator for KeyItemIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> DoubleEndedIterator for KeyItemIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { fn 
next_back(&mut self) -> Option { @@ -1029,11 +1030,10 @@ where } // This enables writing `append_store.iter().skip(n).rev()` -impl<'a, K, T, S, Ser> ExactSizeIterator for KeyItemIter<'a, K, T, S, Ser> +impl<'a, K, T, Ser> ExactSizeIterator for KeyItemIter<'a, K, T, Ser> where K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, - S: Storage, Ser: Serde, { } @@ -1047,7 +1047,7 @@ trait PrefixedTypedStorage { /// /// * `storage` - a reference to the storage this item is in /// * `key` - a byte slice representing the key to access the stored item - fn contains_impl(&self, storage: &S, key: &[u8]) -> bool { + fn contains_impl(&self, storage: &dyn Storage, key: &[u8]) -> bool { let prefixed_key = [self.as_slice(), key].concat(); storage.get(&prefixed_key).is_some() } @@ -1059,7 +1059,7 @@ trait PrefixedTypedStorage { /// /// * `storage` - a reference to the storage this item is in /// * `key` - a byte slice representing the key to access the stored item - fn load_impl(&self, storage: &S, key: &[u8]) -> StdResult { + fn load_impl(&self, storage: &dyn Storage, key: &[u8]) -> StdResult { let prefixed_key = [self.as_slice(), key].concat(); Ser::deserialize( &storage @@ -1075,7 +1075,7 @@ trait PrefixedTypedStorage { /// /// * `storage` - a reference to the storage this item is in /// * `key` - a byte slice representing the key to access the stored item - fn may_load_impl(&self, storage: &S, key: &[u8]) -> StdResult> { + fn may_load_impl(&self, storage: &dyn Storage, key: &[u8]) -> StdResult> { let prefixed_key = [self.as_slice(), key].concat(); match storage.get(&prefixed_key) { Some(value) => Ser::deserialize(&value).map(Some), @@ -1090,7 +1090,7 @@ trait PrefixedTypedStorage { /// * `storage` - a mutable reference to the storage this item should go to /// * `key` - a byte slice representing the key to access the stored item /// * `value` - a reference to the item to store - fn save_impl(&self, storage: &mut S, key: &[u8], value: &T) -> StdResult<()> { + fn 
save_impl(&self, storage: &mut dyn Storage, key: &[u8], value: &T) -> StdResult<()> { let prefixed_key = [self.as_slice(), key].concat(); storage.set(&prefixed_key, &Ser::serialize(value)?); Ok(()) @@ -1102,7 +1102,7 @@ trait PrefixedTypedStorage { /// /// * `storage` - a mutable reference to the storage this item is in /// * `key` - a byte slice representing the key to access the stored item - fn remove_impl(&self, storage: &mut S, key: &[u8]) { + fn remove_impl(&self, storage: &mut dyn Storage, key: &[u8]) { let prefixed_key = [self.as_slice(), key].concat(); storage.remove(&prefixed_key); } From d677ea34d725e6044ded214c5ad4a7464e2a1b67 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Mon, 29 Aug 2022 15:38:08 +0300 Subject: [PATCH 09/12] added support for generic errors in padding. This is needed because cw 1.0 allows generic errors in entry points unlike cw 0.1 --- packages/utils/src/padding.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/utils/src/padding.rs b/packages/utils/src/padding.rs index 2b37dbf..a778083 100644 --- a/packages/utils/src/padding.rs +++ b/packages/utils/src/padding.rs @@ -1,4 +1,4 @@ -use cosmwasm_std::{Binary, Response, StdResult}; +use cosmwasm_std::{Binary, Response}; /// Take a Vec and pad it up to a multiple of `block_size`, using spaces at the end. pub fn space_pad(message: &mut Vec, block_size: usize) -> &mut Vec { @@ -14,13 +14,13 @@ pub fn space_pad(message: &mut Vec, block_size: usize) -> &mut Vec { message } -/// Pad the data and logs in a `StdResult` to the block size, with spaces. +/// Pad the data and logs in a `Result` to the block size, with spaces. // Users don't need to care about it as the type `T` has a default, and will // always be known in the context of the caller. 
-pub fn pad_handle_result( - response: StdResult>, +pub fn pad_handle_result( + response: Result, E>, block_size: usize, -) -> StdResult> +) -> Result, E> where T: Clone + std::fmt::Debug + PartialEq + schemars::JsonSchema, { @@ -40,7 +40,7 @@ where } /// Pad a `QueryResult` with spaces -pub fn pad_query_result(response: StdResult, block_size: usize) -> StdResult { +pub fn pad_query_result(response: Result, block_size: usize) -> Result { response.map(|mut response| { space_pad(&mut response.0, block_size); response From 281297f4e5cb7892a2ec6786d51c0ea9687d7119 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Mon, 29 Aug 2022 15:49:26 +0300 Subject: [PATCH 10/12] fixed reference to cw0.1 type Storage pattern in viewing_key::set function --- packages/viewing_key/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/viewing_key/src/lib.rs b/packages/viewing_key/src/lib.rs index d4bd962..ffe5baa 100644 --- a/packages/viewing_key/src/lib.rs +++ b/packages/viewing_key/src/lib.rs @@ -63,7 +63,7 @@ pub trait ViewingKeyStore { } /// Set a new viewing key based on a predetermined value. - fn set(storage: &mut S, account: &str, viewing_key: &str) { + fn set(storage: &mut dyn Storage, account: &str, viewing_key: &str) { let mut balance_store = PrefixedStorage::new(storage, Self::STORAGE_KEY); balance_store.set(account.as_bytes(), &sha_256(viewing_key.as_bytes())); } From 1d3b58ce3c59cf810a95c78173b06e89fd8ae0dc Mon Sep 17 00:00:00 2001 From: srdtrk Date: Tue, 30 Aug 2022 18:03:36 +0300 Subject: [PATCH 11/12] fixed a bug in item::update, also added the add_suffix feature to item. 
Expanded the tests for Item --- packages/storage/src/append_store.rs | 3 +- packages/storage/src/deque_store.rs | 3 +- packages/storage/src/item.rs | 66 ++++++++++++++++++++++++++-- packages/storage/src/keymap.rs | 15 +++---- 4 files changed, 71 insertions(+), 16 deletions(-) diff --git a/packages/storage/src/append_store.rs b/packages/storage/src/append_store.rs index 45f138f..0087465 100644 --- a/packages/storage/src/append_store.rs +++ b/packages/storage/src/append_store.rs @@ -24,7 +24,6 @@ where /// prefix of the newly constructed Storage namespace: &'a [u8], /// needed if any suffixes were added to the original namespace. - /// therefore it is not necessarily same as the namespace. prefix: Option>, length: Mutex>, item_type: PhantomData, @@ -213,7 +212,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> AppendStore<'a, T, Ser> { Ser::deserialize( &storage .get(&prefixed_key) - .ok_or(StdError::not_found(type_name::()))?, + .ok_or_else(|| StdError::not_found(type_name::()))?, ) } diff --git a/packages/storage/src/deque_store.rs b/packages/storage/src/deque_store.rs index 812b433..86c0a02 100644 --- a/packages/storage/src/deque_store.rs +++ b/packages/storage/src/deque_store.rs @@ -26,7 +26,6 @@ where /// prefix of the newly constructed Storage namespace: &'a [u8], /// needed if any suffixes were added to the original namespace. - /// therefore it is not necessarily same as the namespace. 
prefix: Option>, length: Mutex>, offset: Mutex>, @@ -272,7 +271,7 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> DequeStore<'a, T, Ser> { Ser::deserialize( &storage .get(&prefixed_key) - .ok_or(StdError::not_found(type_name::()))?, + .ok_or_else(|| StdError::not_found(type_name::()))?, ) } diff --git a/packages/storage/src/item.rs b/packages/storage/src/item.rs index 3a3e03d..6e82fe3 100644 --- a/packages/storage/src/item.rs +++ b/packages/storage/src/item.rs @@ -13,6 +13,8 @@ where Ser: Serde, { storage_key: &'a [u8], + /// needed if any suffixes were added to the original storage key. + prefix: Option>, item_type: PhantomData, serialization_type: PhantomData, } @@ -21,10 +23,26 @@ impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Item<'a, T, Ser> { pub const fn new(key: &'a [u8]) -> Self { Self { storage_key: key, + prefix: None, item_type: PhantomData, serialization_type: PhantomData, } } + /// This is used to produce a new Item. This can be used when you want to associate an Item to each user + /// and you still get to define the Item as a static constant + pub fn add_suffix(&self, suffix: &[u8]) -> Self { + let prefix = if let Some(prefix) = &self.prefix { + [prefix.clone(), suffix.to_vec()].concat() + } else { + [self.storage_key.to_vec(), suffix.to_vec()].concat() + }; + Self { + storage_key: self.storage_key, + prefix: Some(prefix), + item_type: self.item_type, + serialization_type: self.serialization_type, + } + } } impl<'a, T, Ser> Item<'a, T, Ser> @@ -63,9 +81,8 @@ where /// /// It assumes, that data was initialized before, and if it doesn't exist, `Err(StdError::NotFound)` /// is returned. 
- pub fn update(&self, storage: &mut dyn Storage, action: A) -> StdResult + pub fn update(&self, storage: &mut dyn Storage, action: A) -> StdResult where - S: Storage, A: FnOnce(T) -> StdResult, { let input = self.load_impl(storage)?; @@ -84,7 +101,7 @@ where Ser::deserialize( &storage .get(self.as_slice()) - .ok_or(StdError::not_found(type_name::()))?, + .ok_or_else(|| StdError::not_found(type_name::()))?, ) } @@ -122,7 +139,11 @@ where } fn as_slice(&self) -> &[u8] { - self.storage_key + if let Some(prefix) = &self.prefix { + prefix + } else { + self.storage_key + } } } @@ -154,6 +175,43 @@ mod tests { Ok(()) } + #[test] + fn test_suffix() -> StdResult<()> { + let mut storage = MockStorage::new(); + let item: Item = Item::new(b"test"); + let item1 = item.add_suffix(b"suffix1"); + let item2 = item.add_suffix(b"suffix2"); + + item.save(&mut storage, &0)?; + assert!(item1.is_empty(&storage)); + assert!(item2.is_empty(&storage)); + + item1.save(&mut storage, &1)?; + assert!(!item1.is_empty(&storage)); + assert!(item2.is_empty(&storage)); + assert_eq!(item.may_load(&storage)?, Some(0)); + assert_eq!(item1.may_load(&storage)?, Some(1)); + item2.save(&mut storage, &2)?; + assert_eq!(item.may_load(&storage)?, Some(0)); + assert_eq!(item1.may_load(&storage)?, Some(1)); + assert_eq!(item2.may_load(&storage)?, Some(2)); + + Ok(()) + } + + #[test] + fn test_update() -> StdResult<()> { + let mut storage = MockStorage::new(); + let item: Item = Item::new(b"test"); + + assert!(item.update(&mut storage, |x| Ok(x + 1)).is_err()); + item.save(&mut storage, &7)?; + assert!(item.update(&mut storage, |x| Ok(x + 1)).is_ok()); + assert_eq!(item.load(&storage), Ok(8)); + + Ok(()) + } + #[test] fn test_serializations() -> StdResult<()> { // Check the default behavior is Bincode2 diff --git a/packages/storage/src/keymap.rs b/packages/storage/src/keymap.rs index 9fb1f6e..d47bd1a 100644 --- a/packages/storage/src/keymap.rs +++ b/packages/storage/src/keymap.rs @@ -54,7 +54,6 @@ where /// 
prefix of the newly constructed Storage namespace: &'a [u8], /// needed if any suffixes were added to the original namespace. - /// therefore it is not necessarily same as the namespace. prefix: Option>, length: Mutex>, key_type: PhantomData, @@ -208,9 +207,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: let max_page = _page_from_position(len); if max_page == page { // last page indexes is the same as indexes - let last_key = indexes.pop().ok_or(StdError::generic_err( - "Last item's key not found - should never happen", - ))?; + let last_key = indexes.pop().ok_or_else(|| { + StdError::generic_err("Last item's key not found - should never happen") + })?; // modify last item let mut last_internal_item = self.load_impl(storage, &last_key)?; last_internal_item.index_pos = removed_pos; @@ -220,9 +219,9 @@ impl<'a, K: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned, Ser: self._set_indexes_page(storage, page, &indexes)?; } else { let mut last_page_indexes = self._get_indexes(storage, max_page)?; - let last_key = last_page_indexes.pop().ok_or(StdError::generic_err( - "Last item's key not found - should never happen", - ))?; + let last_key = last_page_indexes.pop().ok_or_else(|| { + StdError::generic_err("Last item's key not found - should never happen") + })?; // modify last item let mut last_internal_item = self.load_impl(storage, &last_key)?; last_internal_item.index_pos = removed_pos; @@ -1064,7 +1063,7 @@ trait PrefixedTypedStorage { Ser::deserialize( &storage .get(&prefixed_key) - .ok_or(StdError::not_found(type_name::()))?, + .ok_or_else(|| StdError::not_found(type_name::()))?, ) } From 898c52dcadce6e678818295918d875f8b81f6284 Mon Sep 17 00:00:00 2001 From: srdtrk Date: Tue, 30 Aug 2022 18:11:27 +0300 Subject: [PATCH 12/12] implemented Clone for Item --- packages/storage/src/item.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/packages/storage/src/item.rs b/packages/storage/src/item.rs index 
6e82fe3..fc45116 100644 --- a/packages/storage/src/item.rs +++ b/packages/storage/src/item.rs @@ -147,6 +147,17 @@ where } } +impl<'a, T: Serialize + DeserializeOwned, Ser: Serde> Clone for Item<'a, T, Ser> { + fn clone(&self) -> Self { + Self { + storage_key: self.storage_key, + prefix: self.prefix.clone(), + item_type: PhantomData, + serialization_type: PhantomData, + } + } +} + #[cfg(test)] mod tests { use cosmwasm_std::testing::MockStorage;