diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 37906772..792290f9 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -57,11 +57,7 @@ jobs:
           - miri
     - script: cargo miri setup
       displayName: cargo miri setup
-     # ignore leaks due to
-     # https://github.com/crossbeam-rs/crossbeam/issues/464
-     # which is
-     # https://github.com/rust-lang/miri/issues/940
-    - script: cargo miri -Zmiri-ignore-leaks test
+    - script: cargo miri test -- -Zmiri-ignore-leaks
       displayName: cargo miri test
 - job: asan
   dependsOn: deny
diff --git a/src/map.rs b/src/map.rs
index 44822d61..7975dbf2 100644
--- a/src/map.rs
+++ b/src/map.rs
@@ -7,10 +7,7 @@ use std::error::Error;
 use std::fmt::{self, Debug, Display, Formatter};
 use std::hash::{BuildHasher, Hash, Hasher};
 use std::iter::FromIterator;
-use std::sync::{
-    atomic::{AtomicIsize, AtomicUsize, Ordering},
-    Once,
-};
+use std::sync::atomic::{AtomicIsize, Ordering};
 
 const ISIZE_BITS: usize = core::mem::size_of::<isize>() * 8;
 
@@ -62,8 +59,10 @@ const MAX_RESIZERS: isize = (1 << (ISIZE_BITS - RESIZE_STAMP_BITS)) - 1;
 /// The bit shift for recording size stamp in `size_ctl`.
 const RESIZE_STAMP_SHIFT: usize = ISIZE_BITS - RESIZE_STAMP_BITS;
 
-static NCPU_INITIALIZER: Once = Once::new();
-static NCPU: AtomicUsize = AtomicUsize::new(0);
+#[cfg(not(miri))]
+static NCPU_INITIALIZER: std::sync::Once = std::sync::Once::new();
+#[cfg(not(miri))]
+static NCPU: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0);
 
 macro_rules! load_factor {
     ($n: expr) => {
@@ -3383,6 +3382,7 @@ mod tree_bins {
     }
 
     #[test]
+    #[cfg_attr(miri, ignore)]
     fn concurrent_tree_bin() {
         let map = HashMap::<usize, usize, _>::with_hasher(ZeroHashBuilder);
         // first, ensure that we have a tree bin
@@ -3537,6 +3537,7 @@ mod tree_bins {
     }
     #[test]
     #[should_panic]
+    #[cfg_attr(miri, ignore)]
     fn disallow_evil() {
         let map: HashMap<_, _> = HashMap::default();
         map.insert(42, String::from("hello"), &crossbeam_epoch::pin());
diff --git a/tests/basic.rs b/tests/basic.rs
index 07c18e5c..6bc02a1a 100644
--- a/tests/basic.rs
+++ b/tests/basic.rs
@@ -206,6 +206,7 @@ fn compute_if_present_remove() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_insert() {
     let map = Arc::new(HashMap::<usize, usize>::new());
 
@@ -236,6 +237,7 @@ fn concurrent_insert() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_remove() {
     let map = Arc::new(HashMap::<usize, usize>::new());
 
@@ -276,6 +278,7 @@ fn concurrent_remove() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_compute_if_present() {
     let map = Arc::new(HashMap::<usize, usize>::new());
 
@@ -314,6 +317,7 @@ fn concurrent_compute_if_present() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_resize_and_get() {
     let map = Arc::new(HashMap::<usize, usize>::new());
     {
diff --git a/tests/basic_ref.rs b/tests/basic_ref.rs
index b2246d1b..9f2c786c 100644
--- a/tests/basic_ref.rs
+++ b/tests/basic_ref.rs
@@ -413,6 +413,7 @@ fn retain_force_some() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_insert() {
     let map = Arc::new(HashMap::<usize, usize>::new());
 
@@ -443,6 +444,7 @@ fn concurrent_insert() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_remove() {
     let map = Arc::new(HashMap::<usize, usize>::new());
 
@@ -483,6 +485,7 @@ fn concurrent_remove() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_compute_if_present() {
     let map = Arc::new(HashMap::<usize, usize>::new());
 
diff --git a/tests/borrow.rs b/tests/borrow.rs
index 75522f61..07277066 100644
--- a/tests/borrow.rs
+++ b/tests/borrow.rs
@@ -67,6 +67,7 @@ fn update() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_insert() {
     let map = Arc::new(HashMap::<String, usize>::new());
     let keys = Arc::new((0..64).map(|i| i.to_string()).collect::<Vec<_>>());
@@ -97,6 +98,7 @@ fn concurrent_insert() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_remove() {
     let map = Arc::new(HashMap::<String, usize>::new());
     let keys = Arc::new((0..64).map(|i| i.to_string()).collect::<Vec<_>>());
diff --git a/tests/cuckoo/stress.rs b/tests/cuckoo/stress.rs
index d037144c..5bf401f7 100644
--- a/tests/cuckoo/stress.rs
+++ b/tests/cuckoo/stress.rs
@@ -147,6 +147,7 @@ fn stress_find_thread(env: Arc<Environment>) {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn stress_test() {
     let root = Arc::new(Environment::new());
     let mut threads = Vec::new();
diff --git a/tests/hasher.rs b/tests/hasher.rs
index c8489dd4..e11e8ca9 100644
--- a/tests/hasher.rs
+++ b/tests/hasher.rs
@@ -23,7 +23,7 @@ impl BuildHasher for ZeroHashBuilder {
 }
 
 fn check<S: BuildHasher + Default>() {
-    let range = 0..1000;
+    let range = if cfg!(miri) { 0..16 } else { 0..1000 };
     let guard = epoch::pin();
     let map = HashMap::<i32, i32, S>::default();
     for i in range.clone() {
diff --git a/tests/jdk/concurrent_associate.rs b/tests/jdk/concurrent_associate.rs
index 4f884dc8..d7458daf 100644
--- a/tests/jdk/concurrent_associate.rs
+++ b/tests/jdk/concurrent_associate.rs
@@ -25,6 +25,7 @@ fn insert(map: Arc<HashMap<KeyVal, KeyVal>>, k: KeyVal) {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn test_concurrent_insert<'g>() {
     test(insert);
 }
diff --git a/tests/jdk/concurrent_contains.rs b/tests/jdk/concurrent_contains.rs
index 59bcd551..622a3480 100644
--- a/tests/jdk/concurrent_contains.rs
+++ b/tests/jdk/concurrent_contains.rs
@@ -11,6 +11,7 @@ const ITERATIONS: usize = 256;
 const ROUNDS: usize = 32;
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn test_concurrent_contains_key() {
     let map = HashMap::new();
     let mut content = [0; NUM_ENTRIES];
diff --git a/tests/jdk/map_check.rs b/tests/jdk/map_check.rs
index e376e3d4..385b2219 100644
--- a/tests/jdk/map_check.rs
+++ b/tests/jdk/map_check.rs
@@ -3,8 +3,17 @@ use flurry::*;
 use rand::prelude::*;
 use std::hash::Hash;
 
+#[cfg(not(miri))]
 const SIZE: usize = 50_000;
+#[cfg(miri)]
+const SIZE: usize = 12;
+
+// there must be more things absent than present!
+#[cfg(not(miri))]
 const ABSENT_SIZE: usize = 1 << 17;
+#[cfg(miri)]
+const ABSENT_SIZE: usize = 1 << 5;
+
 const ABSENT_MASK: usize = ABSENT_SIZE - 1;
 
 fn t1<K, V>(map: &HashMap<K, V>, keys: &[K], expect: usize)
diff --git a/tests/set.rs b/tests/set.rs
index a3242af0..3dde617a 100644
--- a/tests/set.rs
+++ b/tests/set.rs
@@ -99,6 +99,7 @@ fn update() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_insert() {
     let set = Arc::new(HashSet::<usize>::new());
 
@@ -128,6 +129,7 @@ fn concurrent_insert() {
 }
 
 #[test]
+#[cfg_attr(miri, ignore)]
 fn concurrent_remove() {
     let set = Arc::new(HashSet::<usize>::new());