
Bump rand to 0.8, rand_chacha to 0.3, getrandom to 0.2 #32871

Merged
merged 35 commits on Aug 21, 2023
Changes from 26 commits
Commits (35)
7cb815a
sdk: Add concurrent support for rand 0.7 and 0.8
joncinque Aug 17, 2023
0821cb7
Update rand, rand_chacha, and getrandom versions
joncinque Aug 17, 2023
4c99792
Run command to replace `gen_range`
joncinque Aug 17, 2023
bdb5465
sdk: Fix users of older `gen_range`
joncinque Aug 17, 2023
2e9c550
Replace `hash::new_rand` with `hash::new_with_thread_rng`
joncinque Aug 17, 2023
9a0a8ec
perf: Use `Keypair::new()` instead of `generate`
joncinque Aug 17, 2023
5d67cfc
Use older rand version in zk-token-sdk
joncinque Aug 17, 2023
6b8f63e
program-runtime: Inline random key generation
joncinque Aug 17, 2023
d4a6f09
bloom: Fix clippy warnings in tests
joncinque Aug 17, 2023
53d12e3
streamer: Scope rng usage correctly
joncinque Aug 17, 2023
79fb8d3
perf: Fix clippy warning
joncinque Aug 17, 2023
c8c1059
accounts-db: Map to char to generate a random string
joncinque Aug 17, 2023
b87dd11
Remove `from_secret_key_bytes`, it's just `keypair_from_seed`
joncinque Aug 17, 2023
775da77
ledger: Generate keypairs by hand
joncinque Aug 17, 2023
cab9648
ed25519-tests: Use new rand
joncinque Aug 17, 2023
2c99f5f
runtime: Use new rand in all tests
joncinque Aug 17, 2023
05bb142
gossip: Clean up clippy and inline keypair generators
joncinque Aug 17, 2023
6bf9c1d
core: Inline keypair generation for tests
joncinque Aug 17, 2023
f21bd05
Push sbf lockfile change
joncinque Aug 17, 2023
0b10db3
sdk: Sort dependencies correctly
joncinque Aug 17, 2023
3b2597c
Remove `hash::new_with_thread_rng`, use `Hash::new_unique()`
joncinque Aug 17, 2023
37bb533
Use Keypair::new where chacha isn't used
joncinque Aug 17, 2023
147c1e9
sdk: Fix build by marking rand 0.7 optional
joncinque Aug 17, 2023
db48acc
Hardcode secret key length, add static assertion
joncinque Aug 17, 2023
39b5adf
Unify `getrandom` crate usage to fix linking errors
joncinque Aug 18, 2023
fd77bc2
bloom: Fix tests that require a random hash
joncinque Aug 18, 2023
884dc52
Remove some dependencies, try to unify others
joncinque Aug 18, 2023
2b45df5
Remove unnecessary uses of rand and rand_core
joncinque Aug 18, 2023
295023a
Update lockfiles
joncinque Aug 18, 2023
7099108
Add back some dependencies to reduce rebuilds
joncinque Aug 18, 2023
2e09e57
Increase max rebuilds from 14 to 15
joncinque Aug 18, 2023
9d44709
frozen-abi: Remove `getrandom`
joncinque Aug 18, 2023
2b00f23
Bump rebuilds to 17
joncinque Aug 18, 2023
4718c51
Remove getrandom from zk-token-proof
joncinque Aug 18, 2023
7054869
Merge branch 'master' into rand8
joncinque Aug 18, 2023
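
Most of the mechanical churn in the diffs below comes from rand 0.8 changing `Rng::gen_range` to take a single range argument instead of two endpoints, which is what the "Run command to replace `gen_range`" and "sdk: Fix users of older `gen_range`" commits are doing. A minimal before/after sketch of the pattern (identifiers here are illustrative, not taken from the diff):

```rust
use rand::{thread_rng, Rng};

fn pick_index(len: usize) -> usize {
    let mut rng = thread_rng();
    // rand 0.7: rng.gen_range(0, len)
    // rand 0.8: the bounds become a single Range argument
    rng.gen_range(0..len)
}

fn pick_lamports() -> u64 {
    // Inclusive upper bounds use ..= in rand 0.8
    thread_rng().gen_range(1..=200)
}
```
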
108 changes: 54 additions & 54 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions Cargo.toml
@@ -197,7 +197,7 @@ futures-util = "0.3.28"
gag = "1.0.0"
generic-array = { version = "0.14.7", default-features = false }
gethostname = "0.2.3"
getrandom = "0.1.14"
getrandom = "0.2.10"
goauth = "0.13.1"
hex = "0.4.3"
hidapi = { version = "2.4.1", default-features = false }
@@ -263,8 +263,8 @@ qualifier_attr = { version = "0.2.2", default-features = false }
quinn = "0.10.2"
quinn-proto = "0.10.2"
quote = "1.0"
rand = "0.7.0"
rand_chacha = "0.2.2"
rand = "0.8.5"
rand_chacha = "0.3.1"
rand_core = "0.6.4"
raptorq = "1.7.0"
rayon = "1.7.0"
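
The `getrandom` bump is more than a version number: getrandom 0.2 selects its backend through Cargo features (for example `js` for wasm targets and `custom` for targets with no OS entropy source), which is what the later "Unify `getrandom` crate usage to fix linking errors" commit is dealing with. A hedged sketch of how a no-OS target can satisfy the linker with getrandom 0.2's `custom` feature; this is illustrative, not the exact code in this PR:

```rust
// Assumes getrandom = { version = "0.2", features = ["custom"] } in Cargo.toml.
use getrandom::{register_custom_getrandom, Error};

// On a target with no entropy source (e.g. an on-chain VM), fail loudly rather
// than silently handing back predictable bytes.
fn unsupported_getrandom(_buf: &mut [u8]) -> Result<(), Error> {
    Err(Error::UNSUPPORTED)
}

register_custom_getrandom!(unsupported_getrandom);
```
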
2 changes: 1 addition & 1 deletion accounts-cluster-bench/src/main.rs
@@ -136,7 +136,7 @@ fn make_create_message(
maybe_space: Option<u64>,
mint: Option<Pubkey>,
) -> Message {
-let space = maybe_space.unwrap_or_else(|| thread_rng().gen_range(0, 1000));
+let space = maybe_space.unwrap_or_else(|| thread_rng().gen_range(0..1000));

let instructions: Vec<_> = (0..num_instructions)
.flat_map(|_| {
18 changes: 9 additions & 9 deletions accounts-db/src/accounts_db.rs
@@ -4500,7 +4500,7 @@ impl AccountsDb {
} else {
false
};
-if is_candidate || (can_randomly_shrink && thread_rng().gen_range(0, 10000) == 0) {
+if is_candidate || (can_randomly_shrink && thread_rng().gen_range(0..10000) == 0) {
// we are a candidate for shrink, so either append us to the previous append vec
// or recreate us as a new append vec and eliminate the dead accounts
info!(
@@ -5712,7 +5712,7 @@ impl AccountsDb {
self.stats
.create_store_count
.fetch_add(1, Ordering::Relaxed);
-let path_index = thread_rng().gen_range(0, paths.len());
+let path_index = thread_rng().gen_range(0..paths.len());
let store = Arc::new(self.new_storage_entry(
slot,
Path::new(&paths[path_index]),
@@ -9680,7 +9680,7 @@ impl AccountsDb {
pub fn check_accounts(&self, pubkeys: &[Pubkey], slot: Slot, num: usize, count: usize) {
let ancestors = vec![(slot, 0)].into_iter().collect();
for _ in 0..num {
-let idx = thread_rng().gen_range(0, num);
+let idx = thread_rng().gen_range(0..num);
let account = self.load_without_fixed_root(&ancestors, &pubkeys[idx]);
let account1 = Some((
AccountSharedData::new(
@@ -9887,7 +9887,7 @@ pub mod test_utils {
// accounts cache!
pub fn update_accounts_bench(accounts: &Accounts, pubkeys: &[Pubkey], slot: u64) {
for pubkey in pubkeys {
-let amount = thread_rng().gen_range(0, 10);
+let amount = thread_rng().gen_range(0..10);
let account = AccountSharedData::new(amount, 0, AccountSharedData::default().owner());
accounts.store_slow_uncached(slot, pubkey, &account);
}
@@ -11389,7 +11389,7 @@ pub mod tests {
let mut pubkeys: Vec<Pubkey> = vec![];
db.create_account(&mut pubkeys, 0, 100, 0, 0);
for _ in 1..100 {
-let idx = thread_rng().gen_range(0, 99);
+let idx = thread_rng().gen_range(0..99);
let ancestors = vec![(0, 0)].into_iter().collect();
let account = db
.load_without_fixed_root(&ancestors, &pubkeys[idx])
@@ -11405,7 +11405,7 @@ pub mod tests {

// check that all the accounts appear with a new root
for _ in 1..100 {
-let idx = thread_rng().gen_range(0, 99);
+let idx = thread_rng().gen_range(0..99);
let ancestors = vec![(0, 0)].into_iter().collect();
let account0 = db
.load_without_fixed_root(&ancestors, &pubkeys[idx])
@@ -11540,7 +11540,7 @@ pub mod tests {

fn update_accounts(accounts: &AccountsDb, pubkeys: &[Pubkey], slot: Slot, range: usize) {
for _ in 1..1000 {
-let idx = thread_rng().gen_range(0, range);
+let idx = thread_rng().gen_range(0..range);
let ancestors = vec![(slot, 0)].into_iter().collect();
if let Some((mut account, _)) =
accounts.load_without_fixed_root(&ancestors, &pubkeys[idx])
@@ -12377,7 +12377,7 @@ pub mod tests {
let mut account = AccountSharedData::new(1, 0, &pubkey);
let mut i = 0;
loop {
-let account_bal = thread_rng().gen_range(1, 99);
+let account_bal = thread_rng().gen_range(1..99);
account.set_lamports(account_bal);
db.store_for_tests(slot, &[(&pubkey, &account)]);

@@ -15178,7 +15178,7 @@ pub mod tests {
// Ordering::Relaxed is ok because of no data dependencies; the modified field is
// completely free-standing cfg(test) control-flow knob.
db.load_limit
-.store(thread_rng().gen_range(0, 10) as u64, Ordering::Relaxed);
+.store(thread_rng().gen_range(0..10) as u64, Ordering::Relaxed);

// Load should never be unable to find this key
let loaded_account = db
2 changes: 1 addition & 1 deletion accounts-db/src/accounts_index.rs
@@ -1619,7 +1619,7 @@ impl<T: IndexValue, U: DiskIndexValue + From<T> + Into<T>> AccountsIndex<T, U> {
let expected_items_per_bin = item_len * 2 / bins;
// offset bin 0 in the 'binned' array by a random amount.
// This results in calls to insert_new_entry_if_missing_with_lock from different threads starting at different bins.
-let random_offset = thread_rng().gen_range(0, bins);
+let random_offset = thread_rng().gen_range(0..bins);
let use_disk = self.storage.storage.disk.is_some();
let mut binned = (0..bins)
.map(|mut pubkey_bin| {
4 changes: 2 additions & 2 deletions accounts-db/src/ancient_append_vecs.rs
@@ -91,7 +91,7 @@ impl AncientSlotInfos {
let should_shrink = if capacity > 0 {
let alive_ratio = alive_bytes * 100 / capacity;
alive_ratio < 90
-|| if can_randomly_shrink && thread_rng().gen_range(0, 10000) == 0 {
+|| if can_randomly_shrink && thread_rng().gen_range(0..10000) == 0 {
was_randomly_shrunk = true;
true
} else {
@@ -1202,7 +1202,7 @@ pub mod tests {
let mut data_size = 450;
// random # of extra accounts here
let total_accounts_per_storage =
-thread_rng().gen_range(0, total_accounts_per_storage);
+thread_rng().gen_range(0..total_accounts_per_storage);
let _pubkeys_and_accounts = storages
.iter()
.map(|storage| {
2 changes: 1 addition & 1 deletion accounts-db/src/append_vec.rs
@@ -1067,7 +1067,7 @@ pub mod tests {

let now = Instant::now();
for _ in 0..size {
-let sample = thread_rng().gen_range(0, indexes.len());
+let sample = thread_rng().gen_range(0..indexes.len());
let account = create_test_account(sample);
assert_eq!(av.get_account_test(indexes[sample]).unwrap(), account);
}
1 change: 1 addition & 0 deletions accounts-db/src/append_vec/test_utils.rs
@@ -25,6 +25,7 @@ pub fn get_append_vec_path(path: &str) -> TempFile {
let out_dir = get_append_vec_dir();
let rand_string: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
+.map(char::from)

Reviewer comment (Contributor): this whole method seems ripe for replacement with tempfile

Author reply: Truth

.take(30)
.collect();
let dir = format!("{out_dir}/{rand_string}");
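
The added `.map(char::from)` is needed because rand 0.8's `Alphanumeric` distribution samples `u8` rather than `char`, so the bytes must be converted before collecting into a `String`. The reviewer's tempfile suggestion above would sidestep the hand-rolled random path entirely; a rough sketch of that hypothetical replacement (not part of this PR):

```rust
use std::path::PathBuf;
use tempfile::TempDir;

// Hypothetical replacement for get_append_vec_path: TempDir picks a unique,
// randomly named directory and removes it when dropped.
fn get_append_vec_dir_tempfile() -> std::io::Result<(TempDir, PathBuf)> {
    let dir = TempDir::new()?;
    let path = dir.path().join("append_vec_test");
    Ok((dir, path))
}
```
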
2 changes: 1 addition & 1 deletion accounts-db/src/cache_hash_data.rs
@@ -462,7 +462,7 @@ pub mod tests {
}

CalculateHashIntermediate::new(
-solana_sdk::hash::new_rand(&mut rng),
+solana_sdk::hash::Hash::new_unique(),
ct as u64,
pk,
)
2 changes: 1 addition & 1 deletion accounts-db/src/hardened_unpack.rs
@@ -381,7 +381,7 @@ where
};
if let ["accounts", file] = parts {
// Randomly distribute the accounts files about the available `account_paths`,
-let path_index = thread_rng().gen_range(0, account_paths.len());
+let path_index = thread_rng().gen_range(0..account_paths.len());
match account_paths
.get(path_index)
.map(|path_buf| path_buf.as_path())
4 changes: 2 additions & 2 deletions accounts-db/src/in_mem_accounts_index.rs
@@ -179,7 +179,7 @@ impl<T: IndexValue, U: DiskIndexValue + From<T> + Into<T>> InMemAccountsIndex<T,
// Spread out the scanning across all ages within the window.
// This causes us to scan 1/N of the bins each 'Age'
remaining_ages_to_skip_flushing: AtomicU8::new(
-thread_rng().gen_range(0, num_ages_to_distribute_flushes),
+thread_rng().gen_range(0..num_ages_to_distribute_flushes),
),
num_ages_to_distribute_flushes,
}
@@ -932,7 +932,7 @@ impl<T: IndexValue, U: DiskIndexValue + From<T> + Into<T>> InMemAccountsIndex<T,
// random eviction
const N: usize = 1000;
// 1/N chance of eviction
-thread_rng().gen_range(0, N) == 0
+thread_rng().gen_range(0..N) == 0
}

/// assumes 1 entry in the slot list. Ignores overhead of the HashMap and such
2 changes: 1 addition & 1 deletion accounts-db/src/read_only_accounts_cache.rs
@@ -291,7 +291,7 @@ mod tests {
const MAX_CACHE_SIZE: usize = 17 * (CACHE_ENTRY_SIZE + DATA_SIZE);
let mut rng = ChaChaRng::from_seed(SEED);
let cache = ReadOnlyAccountsCache::new(MAX_CACHE_SIZE);
-let slots: Vec<Slot> = repeat_with(|| rng.gen_range(0, 1000)).take(5).collect();
+let slots: Vec<Slot> = repeat_with(|| rng.gen_range(0..1000)).take(5).collect();
let pubkeys: Vec<Pubkey> = repeat_with(|| {
let mut arr = [0u8; 32];
rng.fill(&mut arr[..]);
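
The bump to rand_chacha 0.3 does not change how this test seeds a deterministic RNG: `SeedableRng::from_seed` still takes a 32-byte array, and only the `gen_range` call site needs the new range syntax. A small sketch of the pattern, with an arbitrary seed value:

```rust
use rand::{Rng, SeedableRng};
use rand_chacha::ChaChaRng;

// Deterministic RNG for reproducible tests; the seed bytes are arbitrary.
fn sample_slots() -> Vec<u64> {
    let mut rng = ChaChaRng::from_seed([42u8; 32]);
    std::iter::repeat_with(|| rng.gen_range(0..1000)).take(5).collect()
}
```
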
2 changes: 1 addition & 1 deletion accounts-db/src/stake_rewards.rs
@@ -98,7 +98,7 @@ impl StakeReward {
stake_pubkey: Pubkey::new_unique(),
stake_reward_info: RewardInfo {
reward_type: RewardType::Staking,
-lamports: rng.gen_range(1, 200),
+lamports: rng.gen_range(1..200),
post_balance: 0, /* unused atm */
commission: None, /* unused atm */
},
2 changes: 1 addition & 1 deletion accounts-db/src/tiered_storage/index.rs
@@ -127,7 +127,7 @@ mod tests {
.iter()
.map(|address| AccountIndexWriterEntry {
address,
-block_offset: rng.gen_range(128, 2048),
+block_offset: rng.gen_range(128..2048),
intra_block_offset: 0,
})
.collect();
8 changes: 4 additions & 4 deletions bloom/benches/bloom.rs
@@ -103,7 +103,7 @@ fn bench_sigs_hashmap(bencher: &mut Bencher) {
#[bench]
fn bench_add_hash(bencher: &mut Bencher) {
let mut rng = rand::thread_rng();
-let hash_values: Vec<_> = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let hash_values: Vec<_> = std::iter::repeat_with(Hash::new_unique)
.take(1200)
.collect();
let mut fail = 0;
@@ -112,7 +112,7 @@ fn bench_add_hash(bencher: &mut Bencher) {
for hash_value in &hash_values {
bloom.add(hash_value);
}
-let index = rng.gen_range(0, hash_values.len());
+let index = rng.gen_range(0..hash_values.len());
if !bloom.contains(&hash_values[index]) {
fail += 1;
}
@@ -123,7 +123,7 @@ fn bench_add_hash(bencher: &mut Bencher) {
#[bench]
fn bench_add_hash_atomic(bencher: &mut Bencher) {
let mut rng = rand::thread_rng();
-let hash_values: Vec<_> = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let hash_values: Vec<_> = std::iter::repeat_with(Hash::new_unique)
.take(1200)
.collect();
let mut fail = 0;
@@ -136,7 +136,7 @@ fn bench_add_hash_atomic(bencher: &mut Bencher) {
for hash_value in &hash_values {
bloom.add(hash_value);
}
-let index = rng.gen_range(0, hash_values.len());
+let index = rng.gen_range(0..hash_values.len());
if !bloom.contains(&hash_values[index]) {
fail += 1;
}
25 changes: 15 additions & 10 deletions bloom/src/bloom.rs
@@ -308,10 +308,16 @@ mod test {
);
}

+fn generate_random_hash() -> Hash {
+    let mut rng = rand::thread_rng();
+    let mut hash = [0u8; solana_sdk::hash::HASH_BYTES];
+    rng.fill(&mut hash);
+    Hash::new_from_array(hash)
+}

Reviewer comment (Contributor): lol... Hash::new_unique() is broken. it should be hashing the counter, not using it straight up 🙃

Author reply: I can try changing it to that and see if the bloom filter test passes. On the flipside, and I might be understanding this wrong, but it seems like the test is meant for random hashes.

Reviewer reply (Contributor): nah don't mess with it here

#[test]
fn test_atomic_bloom() {
let mut rng = rand::thread_rng();
-let hash_values: Vec<_> = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let hash_values: Vec<_> = std::iter::repeat_with(generate_random_hash)
.take(1200)
.collect();
let bloom: AtomicBloom<_> = Bloom::<Hash>::random(1287, 0.1, 7424).into();
@@ -328,7 +334,7 @@ mod test {
for hash_value in hash_values {
assert!(bloom.contains(&hash_value));
}
-let false_positive = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let false_positive = std::iter::repeat_with(generate_random_hash)
.take(10_000)
.filter(|hash_value| bloom.contains(hash_value))
.count();
Expand All @@ -340,7 +346,7 @@ mod test {
let mut rng = rand::thread_rng();
let keys: Vec<_> = std::iter::repeat_with(|| rng.gen()).take(5).collect();
let mut bloom = Bloom::<Hash>::new(9731, keys.clone());
-let hash_values: Vec<_> = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let hash_values: Vec<_> = std::iter::repeat_with(generate_random_hash)
.take(1000)
.collect();
for hash_value in &hash_values {
@@ -375,10 +381,9 @@ mod test {
assert!(bloom.contains(hash_value));
}
// Round trip, inserting new hash values.
-let more_hash_values: Vec<_> =
-    std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
-        .take(1000)
-        .collect();
+let more_hash_values: Vec<_> = std::iter::repeat_with(generate_random_hash)
+    .take(1000)
+    .collect();
let bloom: AtomicBloom<_> = bloom.into();
assert_eq!(bloom.num_bits, 9731);
assert_eq!(bloom.bits.len(), (9731 + 63) / 64);
@@ -391,7 +396,7 @@ mod test {
for hash_value in &more_hash_values {
assert!(bloom.contains(hash_value));
}
-let false_positive = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let false_positive = std::iter::repeat_with(generate_random_hash)
.take(10_000)
.filter(|hash_value| bloom.contains(hash_value))
.count();
@@ -410,7 +415,7 @@ mod test {
for hash_value in &more_hash_values {
assert!(bloom.contains(hash_value));
}
-let false_positive = std::iter::repeat_with(|| solana_sdk::hash::new_rand(&mut rng))
+let false_positive = std::iter::repeat_with(generate_random_hash)
.take(10_000)
.filter(|hash_value| bloom.contains(hash_value))
.count();
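
The review thread above about `Hash::new_unique()` refers to it writing a raw counter directly into the hash bytes rather than hashing the counter. A minimal sketch of the counter-hashing variant the reviewer describes, purely illustrative since the PR deliberately leaves `Hash::new_unique` untouched:

```rust
use std::sync::atomic::{AtomicU64, Ordering};
use solana_sdk::hash::{hash, Hash};

static UNIQUE_COUNTER: AtomicU64 = AtomicU64::new(0);

// Hash the counter value instead of copying its bytes straight into the Hash,
// so successive values look random rather than sequential.
fn new_unique_hashed() -> Hash {
    let n = UNIQUE_COUNTER.fetch_add(1, Ordering::Relaxed);
    hash(&n.to_le_bytes())
}
```
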
2 changes: 1 addition & 1 deletion bucket_map/src/bucket.rs
@@ -483,7 +483,7 @@ impl<'b, T: Clone + Copy + 'static> Bucket<T> {
let best_bucket = &mut self.data[best_fit_bucket as usize];
let cap_power = best_bucket.contents.capacity_pow2();
let cap = best_bucket.capacity();
-let pos = thread_rng().gen_range(0, cap);
+let pos = thread_rng().gen_range(0..cap);
let mut success = false;
// max search is increased here by a lot for this search. The idea is that we just have to find an empty bucket somewhere.
// We don't mind waiting on a new write (by searching longer). Writing is done in the background only.