fix(pruned mode): prune inputs, keep track of kernel/utxo sum
sdbondi committed Nov 2, 2021
1 parent 90f2c07 commit 678aa27
Showing 19 changed files with 329 additions and 325 deletions.
@@ -88,7 +88,7 @@ impl HorizonStateSync {

let prover = CryptoFactories::default().range_proof;
let mut horizon_state =
HorizonStateSynchronization::new(shared, self.sync_peer.clone(), horizon_sync_height, &prover);
HorizonStateSynchronization::new(shared, self.sync_peer.clone(), horizon_sync_height, prover);

match horizon_state.synchronize().await {
Ok(()) => {

Large diffs are not rendered by default.

41 changes: 32 additions & 9 deletions base_layer/core/src/blocks/accumulated_data.rs
@@ -49,43 +49,55 @@ use tari_mmr::{pruned_hashset::PrunedHashSet, ArrayLike};

const LOG_TARGET: &str = "c::bn::acc_data";

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct BlockAccumulatedData {
pub(crate) kernels: PrunedHashSet,
pub(crate) outputs: PrunedHashSet,
pub(crate) witness: PrunedHashSet,
pub(crate) deleted: DeletedBitmap,
pub(crate) range_proofs: PrunedHashSet,
pub(crate) kernel_sum: Commitment,
pub(crate) utxo_sum: Commitment,
}

impl BlockAccumulatedData {
pub fn new(
kernels: PrunedHashSet,
outputs: PrunedHashSet,
range_proofs: PrunedHashSet,
witness: PrunedHashSet,
deleted: Bitmap,
total_kernel_sum: Commitment,
kernel_sum: Commitment,
utxo_sum: Commitment,
) -> Self {
Self {
kernels,
outputs,
range_proofs,
witness,
deleted: DeletedBitmap { deleted },
kernel_sum: total_kernel_sum,
kernel_sum,
utxo_sum,
}
}

pub fn deleted(&self) -> &Bitmap {
&self.deleted.deleted
}

pub fn set_deleted(&mut self, deleted: DeletedBitmap) -> &mut Self {
self.deleted = deleted;
self
}

pub fn dissolve(self) -> (PrunedHashSet, PrunedHashSet, PrunedHashSet, Bitmap) {
(self.kernels, self.outputs, self.range_proofs, self.deleted.deleted)
(self.kernels, self.outputs, self.witness, self.deleted.deleted)
}

pub fn kernel_sum(&self) -> &Commitment {
&self.kernel_sum
}

pub fn utxo_sum(&self) -> &Commitment {
&self.utxo_sum
}
}

impl Default for BlockAccumulatedData {
@@ -96,8 +108,9 @@ impl Default for BlockAccumulatedData {
deleted: DeletedBitmap {
deleted: Bitmap::create(),
},
range_proofs: Default::default(),
witness: Default::default(),
kernel_sum: Default::default(),
utxo_sum: Default::default(),
}
}
}
@@ -110,11 +123,21 @@ impl Display for BlockAccumulatedData {
self.outputs.len().unwrap_or(0),
self.deleted.deleted.cardinality(),
self.kernels.len().unwrap_or(0),
self.range_proofs.len().unwrap_or(0)
self.witness.len().unwrap_or(0)
)
}
}

#[derive(Debug, Clone, Default)]
pub struct UpdateBlockAccumulatedData {
pub kernel_hash_set: Option<PrunedHashSet>,
pub utxo_hash_set: Option<PrunedHashSet>,
pub witness_hash_set: Option<PrunedHashSet>,
pub deleted_diff: Option<DeletedBitmap>,
pub utxo_sum: Option<Commitment>,
pub kernel_sum: Option<Commitment>,
}
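For orientation, the new UpdateBlockAccumulatedData struct reads as a partial update: fields left as None appear to leave the stored value untouched, while Some(..) values replace it. Below is a minimal sketch of the pattern, mirroring the genesis-initialization hunk further down in this diff; it is a fragment rather than a full program, and txn, header_hash, kernel_sum and utxo_sum are placeholders, not identifiers from the commit.

// Hypothetical sketch: replace only the running sums for a block, leaving the
// kernel/utxo/witness hash sets and the deleted-bitmap diff as None (unchanged).
txn.update_block_accumulated_data(header_hash, UpdateBlockAccumulatedData {
    kernel_sum: Some(kernel_sum),
    utxo_sum: Some(utxo_sum),
    ..Default::default()
});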

/// Wrapper struct to serialize and deserialize Bitmap
#[derive(Debug, Clone)]
pub struct DeletedBitmap {
1 change: 1 addition & 0 deletions base_layer/core/src/blocks/mod.rs
@@ -30,6 +30,7 @@ pub use accumulated_data::{
ChainHeader,
CompleteDeletedBitmap,
DeletedBitmap,
UpdateBlockAccumulatedData,
};

mod error;
27 changes: 9 additions & 18 deletions base_layer/core/src/chain_storage/async_db.rs
@@ -32,6 +32,7 @@ use crate::{
DeletedBitmap,
HistoricalBlock,
NewBlockTemplate,
UpdateBlockAccumulatedData,
},
chain_storage::{
blockchain_database::MmrRoots,
@@ -61,7 +62,6 @@ use tari_common_types::{
chain_metadata::ChainMetadata,
types::{BlockHash, Commitment, HashOutput, Signature},
};
use tari_mmr::pruned_hashset::PrunedHashSet;

const LOG_TARGET: &str = "c::bn::async_db";

Expand Down Expand Up @@ -145,7 +145,7 @@ impl<B: BlockchainBackend + 'static> AsyncBlockchainDb<B> {
//---------------------------------- Metadata --------------------------------------------//
make_async_fn!(get_chain_metadata() -> ChainMetadata, "get_chain_metadata");

make_async_fn!(fetch_horizon_data() -> Option<HorizonData>, "fetch_horizon_data");
make_async_fn!(fetch_horizon_data() -> HorizonData, "fetch_horizon_data");

//---------------------------------- TXO --------------------------------------------//
make_async_fn!(fetch_utxo(hash: HashOutput) -> Option<PrunedOutput>, "fetch_utxo");
Expand Down Expand Up @@ -224,6 +224,8 @@ impl<B: BlockchainBackend + 'static> AsyncBlockchainDb<B> {

make_async_fn!(fetch_block_accumulated_data(hash: HashOutput) -> BlockAccumulatedData, "fetch_block_accumulated_data");

make_async_fn!(fetch_last_block_accumulated_data() -> Option<BlockAccumulatedData>, "fetch_last_block_accumulated_data");

make_async_fn!(fetch_block_accumulated_data_by_height(height: u64) -> BlockAccumulatedData, "fetch_block_accumulated_data_by_height");

//---------------------------------- Misc. --------------------------------------------//
@@ -280,11 +282,11 @@ impl<'a, B: BlockchainBackend + 'static> AsyncDbTransaction<'a, B> {
&mut self,
height: u64,
hash: HashOutput,
accumulated_data: u128,
accumulated_difficulty: u128,
expected_prev_best_block: HashOutput,
) -> &mut Self {
self.transaction
.set_best_block(height, hash, accumulated_data, expected_prev_best_block);
.set_best_block(height, hash, accumulated_difficulty, expected_prev_best_block);
self
}

@@ -328,23 +330,12 @@ impl<'a, B: BlockchainBackend + 'static> AsyncDbTransaction<'a, B> {
self
}

pub fn update_pruned_hash_set(
&mut self,
mmr_tree: MmrTree,
header_hash: HashOutput,
pruned_hash_set: PrunedHashSet,
) -> &mut Self {
self.transaction
.update_pruned_hash_set(mmr_tree, header_hash, pruned_hash_set);
self
}

pub fn update_block_accumulated_data_with_deleted_diff(
pub fn update_block_accumulated_data_via_horizon_sync(
&mut self,
header_hash: HashOutput,
deleted: Bitmap,
values: UpdateBlockAccumulatedData,
) -> &mut Self {
self.transaction.update_deleted_with_diff(header_hash, deleted);
self.transaction.update_block_accumulated_data(header_hash, values);
self
}

2 changes: 2 additions & 0 deletions base_layer/core/src/chain_storage/blockchain_backend.rs
@@ -77,6 +77,8 @@ pub trait BlockchainBackend: Send + Sync {
header_hash: &HashOutput,
) -> Result<Option<BlockAccumulatedData>, ChainStorageError>;

fn fetch_last_block_accumulated_data(&self) -> Result<Option<BlockAccumulatedData>, ChainStorageError>;

fn fetch_block_accumulated_data_by_height(
&self,
height: u64,
54 changes: 46 additions & 8 deletions base_layer/core/src/chain_storage/blockchain_database.rs
@@ -31,6 +31,7 @@ use crate::{
DeletedBitmap,
HistoricalBlock,
NewBlockTemplate,
UpdateBlockAccumulatedData,
},
chain_storage::{
consts::{
@@ -208,8 +209,30 @@
};
if is_empty {
info!(target: LOG_TARGET, "Blockchain db is empty. Adding genesis block.");
let genesis_block = blockchain_db.consensus_manager.get_genesis_block();
blockchain_db.insert_block(Arc::new(genesis_block))?;
let genesis_block = Arc::new(blockchain_db.consensus_manager.get_genesis_block());
blockchain_db.insert_block(genesis_block.clone())?;
let mut txn = DbTransaction::new();
let utxo_sum = genesis_block
.block()
.body
.outputs()
.iter()
.map(|k| &k.commitment)
.sum::<Commitment>();
let kernel_sum = genesis_block
.block()
.body
.kernels()
.iter()
.map(|k| &k.excess)
.sum::<Commitment>();
txn.update_block_accumulated_data(genesis_block.hash().clone(), UpdateBlockAccumulatedData {
utxo_sum: Some(utxo_sum.clone()),
kernel_sum: Some(kernel_sum.clone()),
..Default::default()
});
txn.set_pruned_height(0, kernel_sum, utxo_sum);
blockchain_db.write(txn)?;
blockchain_db.store_pruning_horizon(config.pruning_horizon)?;
}
if cleanup_orphans_at_startup {
@@ -573,7 +596,7 @@

/// Returns the sum of all kernels
pub fn fetch_kernel_commitment_sum(&self, at_hash: &HashOutput) -> Result<Commitment, ChainStorageError> {
Ok(self.fetch_block_accumulated_data(at_hash.clone())?.kernel_sum)
Ok(self.fetch_block_accumulated_data(at_hash.clone())?.kernel_sum().clone())
}

/// Returns `n` hashes from height _h - offset_ where _h_ is the tip header height back to `h - n - offset`.
Expand Down Expand Up @@ -609,6 +632,11 @@ where B: BlockchainBackend
})
}

pub fn fetch_last_block_accumulated_data(&self) -> Result<Option<BlockAccumulatedData>, ChainStorageError> {
let db = self.db_read_access()?;
db.fetch_last_block_accumulated_data()
}

pub fn fetch_block_accumulated_data_by_height(
&self,
height: u64,
@@ -972,9 +1000,9 @@
rewind_to_hash(&mut *db, hash)
}

pub fn fetch_horizon_data(&self) -> Result<Option<HorizonData>, ChainStorageError> {
pub fn fetch_horizon_data(&self) -> Result<HorizonData, ChainStorageError> {
let db = self.db_read_access()?;
db.fetch_horizon_data()
Ok(db.fetch_horizon_data()?.unwrap_or_default())
}

pub fn fetch_complete_deleted_bitmap_at(
Expand Down Expand Up @@ -1071,7 +1099,7 @@ pub fn calculate_mmr_roots<T: BlockchainBackend>(db: &T, block: &Block) -> Resul
let BlockAccumulatedData {
kernels,
outputs,
range_proofs,
witness: range_proofs,
..
} = db
.fetch_block_accumulated_data(&header.prev_hash)?
@@ -2034,6 +2062,7 @@ fn cleanup_orphans<T: BlockchainBackend>(db: &mut T, orphan_storage_capacity: us

db.delete_oldest_orphans(horizon_height, orphan_storage_capacity)
}

fn prune_database_if_needed<T: BlockchainBackend>(
db: &mut T,
pruning_horizon: u64,
@@ -2067,19 +2096,28 @@ fn prune_database_if_needed<T: BlockchainBackend>(
)?;
let mut txn = DbTransaction::new();
for block_to_prune in (last_pruned + 1)..abs_pruning_horizon {
let header = db.fetch_chain_header_by_height(block_to_prune)?;
let curr_block = db.fetch_block_accumulated_data_by_height(block_to_prune).or_not_found(
"BlockAccumulatedData",
"height",
block_to_prune.to_string(),
)?;
// Note, this could actually be done in one step instead of each block, since deleted is
// accumulated
let inputs_to_prune = curr_block.deleted.bitmap().clone() - last_block.deleted.bitmap();
let output_mmr_positions = curr_block.deleted() - last_block.deleted();
last_block = curr_block;

txn.prune_outputs_and_update_horizon(inputs_to_prune.to_vec(), block_to_prune);
txn.prune_outputs_at_positions(output_mmr_positions.to_vec());
txn.delete_all_inputs_in_block(header.hash().clone());
}

txn.set_pruned_height(
abs_pruning_horizon - 1,
last_block.kernel_sum().clone(),
last_block.utxo_sum().clone(),
);
// TODO: prune block accumulated data

db.write(txn)?;
}

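The pruning loop above derives the output MMR positions to prune for each block as the set difference between that block's accumulated deleted bitmap and the previously pruned block's. Below is a small self-contained sketch of that step using croaring's Bitmap (which the DeletedBitmap wrapper in this crate appears to be built on); the positions are made-up values for illustration and the croaring crate is an assumed dependency.

use croaring::Bitmap;

fn main() {
    // Accumulated deleted output positions at the last pruned block and at the
    // block currently being pruned (placeholder values for illustration).
    let last_deleted = Bitmap::of(&[1, 4, 7]);
    let curr_deleted = Bitmap::of(&[1, 3, 4, 7, 9]);

    // Positions newly deleted by the current block: set difference (and-not),
    // the same operation as `curr_block.deleted() - last_block.deleted()` above.
    let newly_deleted = curr_deleted.andnot(&last_deleted);
    assert_eq!(newly_deleted.to_vec(), vec![3, 9]);
}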
