Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Change kv_store::Value to be Arc<[u8]> instead of Arc<Vec<u8>> #2411

Draft
wants to merge 1 commit into
base: 2344-add-support-for-multi-get-operation-in-the-database
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

#### Breaking
- [2396](https://github.com/FuelLabs/fuel-core/pull/2396): Return `StorageResult<Transaction>` in `OffChainDatabase::old_transaction`.
- [2379](https://github.com/FuelLabs/fuel-core/issues/2379): Change `kv_store::Value` to be `Arc<[u8]>` instead of `Arc<Vec<u8>>`.

### Added
- [2321](https://github.com/FuelLabs/fuel-core/pull/2321): New metrics for the txpool: "The size of transactions in the txpool" (`txpool_tx_size`), "The time spent by a transaction in the txpool in seconds" (`txpool_tx_time_in_txpool_seconds`), The number of transactions in the txpool (`txpool_number_of_transactions`), "The number of transactions pending verification before entering the txpool" (`txpool_number_of_transactions_pending_verification`), "The number of executable transactions in the txpool" (`txpool_number_of_executable_transactions`), "The time it took to select transactions for inclusion in a block in nanoseconds" (`txpool_select_transaction_time_nanoseconds`), The time it took to insert a transaction in the txpool in milliseconds (`txpool_insert_transaction_time_milliseconds`).
Expand Down
12 changes: 6 additions & 6 deletions benches/src/db_lookup_times_utils/seed.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,13 +91,13 @@ pub fn insert_compressed_block(
let compressed_block = block.compress(&ChainId::default());
let height_key = height_key(height);

let raw_compressed_block = postcard::to_allocvec(&compressed_block)?.to_vec();
let raw_compressed_block = postcard::to_allocvec(&compressed_block)?;
let raw_transactions: Vec<(Bytes32, Vec<u8>)> = block
.transactions()
.iter()
.map(|tx| -> DbLookupBenchResult<(Bytes32, Vec<u8>)> {
let tx_id = tx.id(&ChainId::default());
let raw_tx = postcard::to_allocvec(tx)?.to_vec();
let raw_tx = postcard::to_allocvec(tx)?;
Ok((tx_id, raw_tx))
})
.try_collect()?;
Expand All @@ -106,14 +106,14 @@ pub fn insert_compressed_block(
database.put(
height_key.as_slice(),
BenchDbColumn::FuelBlocks,
Value::new(raw_compressed_block),
Value::from(raw_compressed_block),
)?;
// 2. insert into Transactions table
for (tx_id, tx) in raw_transactions {
database.put(
tx_id.as_slice(),
BenchDbColumn::Transactions,
Value::new(tx),
Value::from(tx),
)?;
}

Expand All @@ -130,13 +130,13 @@ pub fn insert_full_block(
let block = insert_compressed_block(database, height, tx_count)?;

let height_key = height_key(height);
let raw_full_block = postcard::to_allocvec(&block)?.to_vec();
let raw_full_block = postcard::to_allocvec(&block)?;

database
.put(
height_key.as_slice(),
BenchDbColumn::FullFuelBlocks,
Value::new(raw_full_block),
Value::from(raw_full_block),
)
.map_err(|err| anyhow!(err))?;

Expand Down
9 changes: 4 additions & 5 deletions benches/src/db_lookup_times_utils/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ fn get_block_full_block_method(
.get(&height_key, BenchDbColumn::FullFuelBlocks)?
.ok_or(anyhow!("empty raw full block"))?;

let block: Block = postcard::from_bytes(raw_block.as_slice())?;
let block: Block = postcard::from_bytes(&raw_block)?;
Ok(block)
}

Expand All @@ -88,7 +88,7 @@ fn get_block_multi_get_method(
let raw_block = database
.get(&height_key, BenchDbColumn::FuelBlocks)?
.ok_or(anyhow!("empty raw block"))?;
let block: CompressedBlock = postcard::from_bytes(raw_block.as_slice())?;
let block: CompressedBlock = postcard::from_bytes(&raw_block)?;
let tx_ids = block.transactions().iter();
let raw_txs = database.multi_get(BenchDbColumn::Transactions.id(), tx_ids)?;
let txs: Vec<Transaction> = raw_txs
Expand All @@ -109,7 +109,7 @@ fn get_block_headers_and_tx_method(
let raw_block = database
.get(&height_key, BenchDbColumn::FuelBlocks)?
.ok_or(anyhow!("empty raw block"))?;
let block: CompressedBlock = postcard::from_bytes(raw_block.as_slice())?;
let block: CompressedBlock = postcard::from_bytes(&raw_block)?;

let txs: Vec<Transaction> = block
.transactions()
Expand All @@ -118,8 +118,7 @@ fn get_block_headers_and_tx_method(
let raw_tx = database
.get(tx_id.as_slice(), BenchDbColumn::Transactions)?
.ok_or(anyhow!("empty transaction"))?;
postcard::from_bytes::<Transaction>(raw_tx.as_slice())
.map_err(|err| anyhow!(err))
postcard::from_bytes::<Transaction>(&raw_tx).map_err(|err| anyhow!(err))
})
.try_collect()?;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ impl OffChainDatabase for OffChainIterableKeyValueView {

self.get(encoder.as_ref(), column)?
.ok_or_else(|| not_found!(DaCompressedBlocks))
.map(|value| value.as_ref().clone())
.map(|value| value.to_vec())
}

fn tx_status(&self, tx_id: &TxId) -> StorageResult<TransactionStatus> {
Expand Down
2 changes: 1 addition & 1 deletion crates/fuel-core/src/state/historical_rocksdb.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ where
);
}
(Some(old_value), WriteOperation::Insert(new_value)) => {
if old_value.as_slice() != new_value.as_slice() {
if *old_value != **new_value {
entry.insert(
key.clone(),
WriteOperation::Insert(old_value.into()),
Expand Down
7 changes: 3 additions & 4 deletions crates/fuel-core/src/state/in_memory/memory_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,6 @@ mod tests {
kv_store::KeyValueMutate,
transactional::ReadTransaction,
};
use std::sync::Arc;

impl<Description> KeyValueMutate for MemoryStore<Description>
where
Expand Down Expand Up @@ -256,7 +255,7 @@ mod tests {
let key = vec![0x00];

let mut db = MemoryStore::<OnChain>::default();
let expected = Arc::new(vec![]);
let expected = Value::from([]);
db.put(&key.to_vec(), Column::Metadata, expected.clone())
.unwrap();

Expand All @@ -281,7 +280,7 @@ mod tests {
let key: Vec<u8> = Vec::with_capacity(0);

let mut db = MemoryStore::<OnChain>::default();
let expected = Arc::new(vec![1, 2, 3]);
let expected = Value::from([1, 2, 3]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();

assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected);
Expand All @@ -305,7 +304,7 @@ mod tests {
let key: Vec<u8> = Vec::with_capacity(0);

let mut db = MemoryStore::<OnChain>::default();
let expected = Arc::new(vec![]);
let expected = Value::from([]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();

assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected);
Expand Down
42 changes: 21 additions & 21 deletions crates/fuel-core/src/state/rocks_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -713,7 +713,7 @@ impl ExtractItem for KeyAndValue {
{
raw_iterator
.item()
.map(|(key, value)| (key.to_vec(), Arc::new(value.to_vec())))
.map(|(key, value)| (key.to_vec(), Value::from(value)))
}

fn size(item: &Self::Item) -> u64 {
Expand Down Expand Up @@ -754,7 +754,7 @@ where
.map_err(|e| StorageError::Other(DatabaseError::Other(e.into()).into()));

self.register_read(result, column.id())
.map(|opt| opt.map(Arc::new))
.map(|opt| opt.map(Value::from))
}

fn get_batch<'a>(
Expand All @@ -775,7 +775,7 @@ where
}),
column.id(),
)
.map(|opt| opt.map(Arc::new))
.map(|opt| opt.map(Value::from))
})
.into_boxed()
}
Expand Down Expand Up @@ -964,7 +964,7 @@ mod tests {
let key = vec![0xA, 0xB, 0xC];

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![1, 2, 3]);
let expected = Value::from([1, 2, 3]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();

assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected)
Expand All @@ -975,10 +975,10 @@ mod tests {
let key = vec![0xA, 0xB, 0xC];

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![1, 2, 3]);
let expected = Value::from([1, 2, 3]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();
let prev = db
.replace(&key, Column::Metadata, Arc::new(vec![2, 4, 6]))
.replace(&key, Column::Metadata, Arc::new([2, 4, 6]))
.unwrap();

assert_eq!(prev, Some(expected));
Expand All @@ -989,7 +989,7 @@ mod tests {
let key = vec![0xA, 0xB, 0xC];

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![1, 2, 3]);
let expected = Value::from([1, 2, 3]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();
assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected);

Expand All @@ -1002,15 +1002,15 @@ mod tests {
let key = vec![0xA, 0xB, 0xC];

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![1, 2, 3]);
let expected = Arc::new([1, 2, 3]);
db.put(&key, Column::Metadata, expected).unwrap();
assert!(db.exists(&key, Column::Metadata).unwrap());
}

#[test]
fn commit_changes_inserts() {
let key = vec![0xA, 0xB, 0xC];
let value = Arc::new(vec![1, 2, 3]);
let value = Value::from([1, 2, 3]);

let (db, _tmp) = create_db();
let ops = vec![(
Expand All @@ -1028,7 +1028,7 @@ mod tests {
#[test]
fn commit_changes_removes() {
let key = vec![0xA, 0xB, 0xC];
let value = Arc::new(vec![1, 2, 3]);
let value = Arc::new([1, 2, 3]);

let (mut db, _tmp) = create_db();
db.put(&key, Column::Metadata, value).unwrap();
Expand All @@ -1047,7 +1047,7 @@ mod tests {
let key = vec![0x00];

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![]);
let expected = Value::from([]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();

assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected);
Expand All @@ -1071,7 +1071,7 @@ mod tests {
let key: Vec<u8> = Vec::with_capacity(0);

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![1, 2, 3]);
let expected = Value::from([1, 2, 3]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();

assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected);
Expand All @@ -1095,7 +1095,7 @@ mod tests {
let key: Vec<u8> = Vec::with_capacity(0);

let (mut db, _tmp) = create_db();
let expected = Arc::new(vec![]);
let expected = Value::from([]);
db.put(&key, Column::Metadata, expected.clone()).unwrap();

assert_eq!(db.get(&key, Column::Metadata).unwrap().unwrap(), expected);
Expand Down Expand Up @@ -1174,7 +1174,7 @@ mod tests {
#[test]
fn snapshot_allows_get_entry_after_it_was_removed() {
let (mut db, _tmp) = create_db();
let value = Arc::new(vec![1, 2, 3]);
let value = Value::from([1, 2, 3]);

// Given
let key_1 = [1; 32];
Expand All @@ -1195,12 +1195,12 @@ mod tests {
#[test]
fn snapshot_allows_correct_iteration_even_after_all_elements_where_removed() {
let (mut db, _tmp) = create_db();
let value = Arc::new(vec![1, 2, 3]);
let value = Value::from([1, 2, 3]);

// Given
let key_1 = [1; 32];
let key_2 = [2; 32];
let key_3 = [3; 32];
let key_1 = vec![1; 32];
let key_2 = vec![2; 32];
let key_3 = vec![3; 32];
db.put(&key_1, Column::Metadata, value.clone()).unwrap();
db.put(&key_2, Column::Metadata, value.clone()).unwrap();
db.put(&key_3, Column::Metadata, value.clone()).unwrap();
Expand All @@ -1223,9 +1223,9 @@ mod tests {
assert_eq!(
snapshot_iter,
vec![
Ok((key_1.to_vec(), value.clone())),
Ok((key_2.to_vec(), value.clone())),
Ok((key_3.to_vec(), value))
Ok((key_1, value.clone())),
Ok((key_2, value.clone())),
Ok((key_3, value))
]
);
}
Expand Down
2 changes: 1 addition & 1 deletion crates/services/upgradable-executor/src/instance.rs
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ impl Instance<Source> {
));
}

caller.write(out_ptr, value.as_slice())?;
caller.write(out_ptr, &value)?;
Ok(0)
} else {
Ok(1)
Expand Down
2 changes: 1 addition & 1 deletion crates/storage/src/codec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ pub trait Encode<T: ?Sized> {

/// Returns the serialized object as an [`Value`].
fn encode_as_value(t: &T) -> Value {
Value::new(Self::encode(t).as_bytes().into_owned())
Value::from(Self::encode(t).as_bytes())
}
}

Expand Down
2 changes: 1 addition & 1 deletion crates/storage/src/iter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ where
.map_err(|e| crate::Error::Codec(anyhow::anyhow!(e)))?;
let value =
<M::Blueprint as BlueprintInspect<M, Self>>::ValueCodec::decode(
value.as_slice(),
&value,
)
.map_err(|e| crate::Error::Codec(anyhow::anyhow!(e)))?;
Ok((key, value))
Expand Down
2 changes: 1 addition & 1 deletion crates/storage/src/kv_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use core::ops::Deref;
/// The key of the storage.
pub type Key = Vec<u8>;
/// The value of the storage. It is wrapped into the `Arc` to provide less cloning of massive objects.
pub type Value = alloc::sync::Arc<Vec<u8>>;
pub type Value = alloc::sync::Arc<[u8]>;

/// The pair of key and value from the storage.
pub type KVItem = StorageResult<(Key, Value)>;
Expand Down
3 changes: 1 addition & 2 deletions crates/storage/src/structured_storage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@ use crate::{
StorageSize,
StorageWrite,
};
use core::ops::Deref;

#[cfg(feature = "std")]
use std::{
Expand Down Expand Up @@ -406,7 +405,7 @@ where
self.inner
.get(key_bytes.as_ref(), <M as TableWithBlueprint>::column())
// TODO: Return `Value` instead of cloned `Vec<u8>`.
.map(|value| value.map(|value| value.deref().clone()))
.map(|value| value.map(|value| value.to_vec()))
}
}

Expand Down
Loading
Loading