reforkering
fmt

rebase forkup

fixing build all

leave the last block open

more tests

rebase fix

replay test fix

rollback depth parameter

cleanup

better replay stage

s/finalized/frozen for bank_checkpoints that are no longer able to write transactions or ticks

remove println

disconnect blob sender so that TVU isn't replaying onto TPU's fork

finalize()->freeze(), code review comments

rebased

fix window_send_test

remove entries_to_blocks()

guard against too many ticks in a slot

limiting ticks, take #2

more info

fix test_replay_stage_poh_error_entry_receiver

fix leader_scheduler::test_update_height

s/bank_state/bank_fork/g

s/bank_checkpoint/bank_delta/g

fix up stragglers from rename

get it building again.  num_ticks_left_in_slot() should not assume slot from tick_height

fixups

ignore leader rotation tests until ledger is turned around

renames, compile fixes
aeyakovenko authored and rob-solana committed Feb 12, 2019
1 parent 8b39eb5 commit c825e7f
Showing 29 changed files with 2,106 additions and 1,025 deletions.
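Taken together, the messages above describe a rename of the bank's layering rather than new behavior: bank_state becomes bank_fork, bank_checkpoint becomes bank_delta, and finalize() becomes freeze() for deltas that may no longer write transactions or ticks. The sketch below is a minimal, hypothetical model of that vocabulary — the names mirror the commit wording, not the crate's actual definitions:

```rust
// Illustrative only -- these are NOT the types introduced by the commit, just a
// toy model of the naming scheme in the messages above: a fork is a stack of
// deltas, and a frozen delta can no longer write transactions or ticks.

#[derive(Debug, Default)]
struct BankDelta {
    tick_count: u64,
    frozen: bool, // s/finalized/frozen: once set, the delta is read-only
}

impl BankDelta {
    /// Record a tick unless the delta has been frozen.
    fn register_tick(&mut self) -> Result<(), &'static str> {
        if self.frozen {
            return Err("delta is frozen; it can no longer write ticks");
        }
        self.tick_count += 1;
        Ok(())
    }

    /// freeze() replaces the old finalize(): no further writes are allowed.
    fn freeze(&mut self) {
        self.frozen = true;
    }
}

#[derive(Debug, Default)]
struct BankFork {
    deltas: Vec<BankDelta>, // newest delta last; only the newest is writable
}

impl BankFork {
    /// Head delta of the fork, opening a fresh one if the current head is
    /// frozen ("leave the last block open").
    fn head_mut(&mut self) -> &mut BankDelta {
        if self.deltas.last().map_or(true, |d| d.frozen) {
            self.deltas.push(BankDelta::default());
        }
        self.deltas.last_mut().unwrap()
    }
}

fn main() {
    let mut fork = BankFork::default();
    fork.head_mut().register_tick().unwrap();
    fork.head_mut().freeze();
    // The frozen head rejects writes, so the next access opens a new delta.
    fork.head_mut().register_tick().unwrap();
    assert_eq!(fork.deltas.len(), 2);
    println!("deltas on fork: {}", fork.deltas.len());
}
```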
19 changes: 13 additions & 6 deletions benches/bank.rs
@@ -12,6 +12,7 @@ use test::Bencher;

#[bench]
fn bench_process_transaction(bencher: &mut Bencher) {
solana_logger::setup();
let (genesis_block, mint_keypair) = GenesisBlock::new(100_000_000);
let bank = Bank::new(&genesis_block);

@@ -25,31 +26,37 @@ fn bench_process_transaction(bencher: &mut Bencher) {
&mint_keypair,
rando0.pubkey(),
10_000,
bank.last_id(),
bank.active_fork().last_id(),
0,
);
assert_eq!(bank.process_transaction(&tx), Ok(()));

// Seed the 'to' account and a cell for its signature.
let rando1 = Keypair::new();
let tx = SystemTransaction::new_move(&rando0, rando1.pubkey(), 1, bank.last_id(), 0);
let tx = SystemTransaction::new_move(
&rando0,
rando1.pubkey(),
1,
bank.active_fork().last_id(),
0,
);
assert_eq!(bank.process_transaction(&tx), Ok(()));

// Finally, return the transaction to the benchmark.
tx
})
.collect();

let mut id = bank.last_id();
let mut id = bank.active_fork().last_id();

for _ in 0..(MAX_ENTRY_IDS - 1) {
bank.register_tick(&id);
for _ in 0..(MAX_ENTRY_IDS / 2) {
bank.active_fork().register_tick(&id);
id = hash(&id.as_ref())
}

bencher.iter(|| {
// Since benchmarker runs this multiple times, we need to clear the signatures.
bank.clear_signatures();
bank.active_fork().clear_signatures();
let results = bank.process_transactions(&transactions);
assert!(results.iter().all(Result::is_ok));
})
20 changes: 10 additions & 10 deletions benches/banking_stage.rs
@@ -86,13 +86,13 @@ fn bench_banking_stage_multi_accounts(bencher: &mut Bencher) {
let res = bank.process_transaction(&tx);
assert!(res.is_ok(), "sanity test transactions");
});
bank.clear_signatures();
bank.active_fork().clear_signatures();
//sanity check, make sure all the transactions can execute in parallel
let res = bank.process_transactions(&transactions);
for r in res {
assert!(r.is_ok(), "sanity parallel execution");
}
bank.clear_signatures();
bank.active_fork().clear_signatures();
let verified: Vec<_> = to_packets_chunked(&transactions.clone(), 192)
.into_iter()
.map(|x| {
@@ -114,19 +114,19 @@ fn bench_banking_stage_multi_accounts(bencher: &mut Bencher) {
let mut id = genesis_block.last_id();
for _ in 0..MAX_ENTRY_IDS {
id = hash(&id.as_ref());
bank.register_tick(&id);
bank.active_fork().register_tick(&id);
}

let half_len = verified.len() / 2;
let mut start = 0;
bencher.iter(move || {
// make sure the transactions are still valid
bank.register_tick(&genesis_block.last_id());
bank.active_fork().register_tick(&genesis_block.last_id());
for v in verified[start..start + half_len].chunks(verified.len() / num_threads) {
verified_sender.send(v.to_vec()).unwrap();
}
check_txs(&signal_receiver, txes / 2);
bank.clear_signatures();
bank.active_fork().clear_signatures();
start += half_len;
start %= verified.len();
});
@@ -195,13 +195,13 @@ fn bench_banking_stage_multi_programs(bencher: &mut Bencher) {
let res = bank.process_transaction(&tx);
assert!(res.is_ok(), "sanity test transactions");
});
bank.clear_signatures();
bank.active_fork().clear_signatures();
//sanity check, make sure all the transactions can execute in parallel
let res = bank.process_transactions(&transactions);
for r in res {
assert!(r.is_ok(), "sanity parallel execution");
}
bank.clear_signatures();
bank.active_fork().clear_signatures();
let verified: Vec<_> = to_packets_chunked(&transactions.clone(), 96)
.into_iter()
.map(|x| {
@@ -223,19 +223,19 @@ fn bench_banking_stage_multi_programs(bencher: &mut Bencher) {
let mut id = genesis_block.last_id();
for _ in 0..MAX_ENTRY_IDS {
id = hash(&id.as_ref());
bank.register_tick(&id);
bank.active_fork().register_tick(&id);
}

let half_len = verified.len() / 2;
let mut start = 0;
bencher.iter(move || {
// make sure the transactions are still valid
bank.register_tick(&genesis_block.last_id());
bank.active_fork().register_tick(&genesis_block.last_id());
for v in verified[start..start + half_len].chunks(verified.len() / num_threads) {
verified_sender.send(v.to_vec()).unwrap();
}
check_txs(&signal_receiver, txes / 2);
bank.clear_signatures();
bank.active_fork().clear_signatures();
start += half_len;
start %= verified.len();
});
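Both benchmark diffs above make the same mechanical change: state that used to be read and mutated directly on the Bank (last_id(), register_tick(), clear_signatures()) is now reached through bank.active_fork(). A minimal sketch of that call-site shape, using hypothetical stand-in types; the real types presumably handle locking and interior mutability, which is ignored here:

```rust
// Hypothetical stand-ins only -- they show the call-site change, nothing more.
struct BankFork {
    id: u64, // placeholder for the real last-entry hash
}

impl BankFork {
    fn last_id(&self) -> u64 {
        self.id
    }
    fn register_tick(&mut self, id: &u64) {
        self.id = *id;
    }
    fn clear_signatures(&mut self) {}
}

struct Bank {
    active: BankFork,
}

impl Bank {
    // Per-fork state is now reached through this accessor.
    fn active_fork(&mut self) -> &mut BankFork {
        &mut self.active
    }
}

fn main() {
    let mut bank = Bank {
        active: BankFork { id: 0 },
    };

    // old: let mut id = bank.last_id();
    let mut id = bank.active_fork().last_id();

    // old: bank.register_tick(&id) in a loop
    for _ in 0..4 {
        id += 1; // stand-in for hashing the previous id
        bank.active_fork().register_tick(&id);
    }

    // old: bank.clear_signatures();
    bank.active_fork().clear_signatures();

    assert_eq!(bank.active_fork().last_id(), 4);
}
```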
4 changes: 2 additions & 2 deletions ledger-tool/src/main.rs
@@ -113,7 +113,7 @@ fn main() {
}
("verify", _) => {
let bank = Bank::new(&genesis_block);
let mut last_id = bank.last_id();
let mut last_id = bank.active_fork().last_id();
let mut num_entries = 0;
for (i, entry) in entries.enumerate() {
if i >= head {
@@ -129,7 +129,7 @@
last_id = entry.id;
num_entries += 1;

if let Err(e) = bank.process_entry(&entry) {
if let Err(e) = bank.active_fork().process_entries(&[entry]) {
eprintln!("verify failed at entry[{}], err: {:?}", i + 2, e);
if !matches.is_present("continue") {
exit(1);
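The ledger-tool change routes single-entry verification through the fork's batch path: the lone entry is wrapped in a one-element slice and handed to process_entries. A sketch of that call shape; Entry, the fork type, and the error type here are simplified stand-ins, not the crate's real definitions:

```rust
// Placeholder types; only the call shape mirrors the diff above.
#[derive(Clone, Debug)]
struct Entry {
    id: u64,
}

struct Fork;

impl Fork {
    // Batch interface: verifying one entry is just the one-element case.
    fn process_entries(&self, entries: &[Entry]) -> Result<(), String> {
        for entry in entries {
            if entry.id == 0 {
                return Err(format!("bad entry {:?}", entry));
            }
            // ... verify and apply the entry ...
        }
        Ok(())
    }
}

struct Bank {
    fork: Fork,
}

impl Bank {
    fn active_fork(&self) -> &Fork {
        &self.fork
    }
}

fn main() {
    let bank = Bank { fork: Fork };
    let entry = Entry { id: 7 };
    // old: bank.process_entry(&entry)
    // new: wrap the single entry in a slice for the batch API
    if let Err(e) = bank.active_fork().process_entries(&[entry]) {
        eprintln!("verify failed, err: {:?}", e);
    }
}
```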
32 changes: 16 additions & 16 deletions src/accounts.rs
@@ -81,11 +81,11 @@ impl AccountsDB {
hash(&serialize(&ordered_accounts).unwrap())
}

fn load<U>(checkpoints: &[U], pubkey: &Pubkey) -> Option<Account>
fn load<U>(deltas: &[U], pubkey: &Pubkey) -> Option<Account>
where
U: Deref<Target = Self>,
{
for db in checkpoints {
for db in deltas {
if let Some(account) = db.accounts.get(pubkey) {
return Some(account.clone());
}
@@ -97,7 +97,7 @@ impl AccountsDB {
pub fn store(&mut self, purge: bool, pubkey: &Pubkey, account: &Account) {
if account.tokens == 0 {
if purge {
// purge if balance is 0 and no checkpoints
// purge if balance is 0 and no deltas
self.accounts.remove(pubkey);
} else {
// store default account if balance is 0 and there's a checkpoint
@@ -128,7 +128,7 @@ impl AccountsDB {
}
}
fn load_tx_accounts<U>(
checkpoints: &[U],
deltas: &[U],
tx: &Transaction,
error_counters: &mut ErrorCounters,
) -> Result<Vec<Account>>
@@ -149,7 +149,7 @@ impl AccountsDB {
// If a fee can pay for execution then the program will be scheduled
let mut called_accounts: Vec<Account> = vec![];
for key in &tx.account_keys {
called_accounts.push(Self::load(checkpoints, key).unwrap_or_default());
called_accounts.push(Self::load(deltas, key).unwrap_or_default());
}
if called_accounts.is_empty() || called_accounts[0].tokens == 0 {
error_counters.account_not_found += 1;
@@ -165,7 +165,7 @@ }
}

fn load_executable_accounts<U>(
checkpoints: &[U],
deltas: &[U],
mut program_id: Pubkey,
error_counters: &mut ErrorCounters,
) -> Result<Vec<(Pubkey, Account)>>
@@ -186,7 +186,7 @@ }
}
depth += 1;

let program = match Self::load(checkpoints, &program_id) {
let program = match Self::load(deltas, &program_id) {
Some(program) => program,
None => {
error_counters.account_not_found += 1;
@@ -208,7 +208,7 @@

/// For each program_id in the transaction, load its loaders.
fn load_loaders<U>(
checkpoints: &[U],
deltas: &[U],
tx: &Transaction,
error_counters: &mut ErrorCounters,
) -> Result<Vec<Vec<(Pubkey, Account)>>>
@@ -223,13 +223,13 @@ impl AccountsDB {
return Err(BankError::AccountNotFound);
}
let program_id = tx.program_ids[ix.program_ids_index as usize];
Self::load_executable_accounts(checkpoints, program_id, error_counters)
Self::load_executable_accounts(deltas, program_id, error_counters)
})
.collect()
}

fn load_accounts<U>(
checkpoints: &[U],
deltas: &[U],
txs: &[Transaction],
lock_results: Vec<Result<()>>,
error_counters: &mut ErrorCounters,
@@ -241,8 +241,8 @@ impl AccountsDB {
.zip(lock_results.into_iter())
.map(|etx| match etx {
(tx, Ok(())) => {
let accounts = Self::load_tx_accounts(checkpoints, tx, error_counters)?;
let loaders = Self::load_loaders(checkpoints, tx, error_counters)?;
let accounts = Self::load_tx_accounts(deltas, tx, error_counters)?;
let loaders = Self::load_loaders(deltas, tx, error_counters)?;
Ok((accounts, loaders))
}
(_, Err(e)) => Err(e),
@@ -268,11 +268,11 @@

impl Accounts {
/// Slow because lock is held for 1 operation instead of many
pub fn load_slow<U>(checkpoints: &[U], pubkey: &Pubkey) -> Option<Account>
pub fn load_slow<U>(deltas: &[U], pubkey: &Pubkey) -> Option<Account>
where
U: Deref<Target = Self>,
{
let dbs: Vec<_> = checkpoints
let dbs: Vec<_> = deltas
.iter()
.map(|obj| obj.accounts_db.read().unwrap())
.collect();
@@ -350,15 +350,15 @@ }
}

pub fn load_accounts<U>(
checkpoints: &[U],
deltas: &[U],
txs: &[Transaction],
results: Vec<Result<()>>,
error_counters: &mut ErrorCounters,
) -> Vec<Result<(InstructionAccounts, InstructionLoaders)>>
where
U: Deref<Target = Self>,
{
let dbs: Vec<_> = checkpoints
let dbs: Vec<_> = deltas
.iter()
.map(|obj| obj.accounts_db.read().unwrap())
.collect();
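The accounts.rs changes are almost entirely the checkpoints-to-deltas rename, but the generic load path is worth calling out: a lookup walks a slice of deltas and returns the first hit, and the slice element only needs to deref to AccountsDB. Below is a self-contained sketch of that pattern with simplified stand-in types (string keys and token-only accounts are placeholders, not the crate's real Pubkey/Account):

```rust
// Simplified stand-ins for AccountsDB and Account, showing the layered lookup
// over a slice of deltas: the first delta that knows the key wins.
use std::collections::HashMap;
use std::ops::Deref;

#[derive(Clone, Debug, PartialEq)]
struct Account {
    tokens: u64,
}

#[derive(Default)]
struct AccountsDB {
    accounts: HashMap<&'static str, Account>, // string keys stand in for Pubkey
}

impl AccountsDB {
    // Mirrors the generic shape in the diff: callers can pass plain references
    // or lock guards -- anything that derefs to AccountsDB.
    fn load<U>(deltas: &[U], pubkey: &&'static str) -> Option<Account>
    where
        U: Deref<Target = Self>,
    {
        for db in deltas {
            if let Some(account) = db.accounts.get(pubkey) {
                return Some(account.clone());
            }
        }
        None
    }
}

fn main() {
    let mut newest = AccountsDB::default();
    newest.accounts.insert("alice", Account { tokens: 5 });

    let mut older = AccountsDB::default();
    older.accounts.insert("alice", Account { tokens: 1 });
    older.accounts.insert("bob", Account { tokens: 9 });

    // Deltas are ordered newest-first in this sketch, so "alice" resolves to
    // the newest value and "bob" falls through to the older delta.
    let deltas: Vec<&AccountsDB> = vec![&newest, &older];
    assert_eq!(
        AccountsDB::load(&deltas, &"alice"),
        Some(Account { tokens: 5 })
    );
    assert_eq!(
        AccountsDB::load(&deltas, &"bob"),
        Some(Account { tokens: 9 })
    );
    println!("layered lookup ok");
}
```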