From 907ea5584173464b31326480dd11d9a71cbd4141 Mon Sep 17 00:00:00 2001
From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com>
Date: Fri, 3 Jun 2022 22:46:01 +0200
Subject: [PATCH 1/2] Adapt tests to be able to run in miri

Decrease the Ns of big loops to a smaller N, which makes them a lot
faster in miri.
---
 compiler/rustc_arena/src/tests.rs | 24 ++++++++++++++++++++----
 1 file changed, 20 insertions(+), 4 deletions(-)

diff --git a/compiler/rustc_arena/src/tests.rs b/compiler/rustc_arena/src/tests.rs
index 911e577c1edc7..ad61464343a4a 100644
--- a/compiler/rustc_arena/src/tests.rs
+++ b/compiler/rustc_arena/src/tests.rs
@@ -79,7 +79,11 @@ fn test_arena_alloc_nested() {
 #[test]
 pub fn test_copy() {
     let arena = TypedArena::default();
-    for _ in 0..100000 {
+    #[cfg(not(miri))]
+    const N: usize = 100000;
+    #[cfg(miri)]
+    const N: usize = 1000;
+    for _ in 0..N {
         arena.alloc(Point { x: 1, y: 2, z: 3 });
     }
 }
@@ -106,7 +110,11 @@ struct Noncopy {
 #[test]
 pub fn test_noncopy() {
     let arena = TypedArena::default();
-    for _ in 0..100000 {
+    #[cfg(not(miri))]
+    const N: usize = 100000;
+    #[cfg(miri)]
+    const N: usize = 1000;
+    for _ in 0..N {
         arena.alloc(Noncopy { string: "hello world".to_string(), array: vec![1, 2, 3, 4, 5] });
     }
 }
@@ -114,7 +122,11 @@ pub fn test_noncopy() {
 #[test]
 pub fn test_typed_arena_zero_sized() {
     let arena = TypedArena::default();
-    for _ in 0..100000 {
+    #[cfg(not(miri))]
+    const N: usize = 100000;
+    #[cfg(miri)]
+    const N: usize = 1000;
+    for _ in 0..N {
         arena.alloc(());
     }
 }
@@ -124,7 +136,11 @@ pub fn test_typed_arena_clear() {
     let mut arena = TypedArena::default();
     for _ in 0..10 {
         arena.clear();
-        for _ in 0..10000 {
+        #[cfg(not(miri))]
+        const N: usize = 10000;
+        #[cfg(miri)]
+        const N: usize = 100;
+        for _ in 0..N {
             arena.alloc(Point { x: 1, y: 2, z: 3 });
         }
     }

From 211fb66810c978576201bb2054f1ccea0e176f10 Mon Sep 17 00:00:00 2001
From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com>
Date: Fri, 3 Jun 2022 22:47:05 +0200
Subject: [PATCH 2/2] Fix stacked borrows violation in rustc_arena

There was a problem with storing a `Box` in a struct, where the current
rules would invalidate pointers derived from it. This makes it store a
raw pointer instead, circumventing the aliasing problems.
---
 compiler/rustc_arena/src/lib.rs | 27 +++++++++++++++++++--------
 1 file changed, 19 insertions(+), 8 deletions(-)

diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs
index 62995dfd2e2f0..a5f1cbc96daa7 100644
--- a/compiler/rustc_arena/src/lib.rs
+++ b/compiler/rustc_arena/src/lib.rs
@@ -19,6 +19,7 @@
 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![feature(ptr_const_cast)]
 
 use smallvec::SmallVec;
 
@@ -27,7 +28,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::marker::{PhantomData, Send};
 use std::mem::{self, MaybeUninit};
-use std::ptr;
+use std::ptr::{self, NonNull};
 use std::slice;
 
 #[inline(never)]
@@ -55,15 +56,24 @@ pub struct TypedArena<T> {
 
 struct ArenaChunk<T = u8> {
     /// The raw storage for the arena chunk.
-    storage: Box<[MaybeUninit<T>]>,
+    storage: NonNull<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
 
+unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
+    fn drop(&mut self) {
+        unsafe { Box::from_raw(self.storage.as_mut()) };
+    }
+}
+
 impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
-        ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
+        ArenaChunk {
+            storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
+            entries: 0,
+        }
     }
 
     /// Destroys this arena chunk.
@@ -72,14 +82,15 @@ impl<T> ArenaChunk<T> {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<T> takes linear time.
         if mem::needs_drop::<T>() {
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
+            let slice = &mut *(self.storage.as_mut());
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
     fn start(&mut self) -> *mut T {
-        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
+        self.storage.as_ptr() as *mut T
     }
 
     // Returns a pointer to the end of the allocated space.
@@ -90,7 +101,7 @@ impl<T> ArenaChunk<T> {
             // A pointer as large as possible for zero-sized elements.
             ptr::invalid_mut(!0)
         } else {
-            self.start().add(self.storage.len())
+            self.start().add((*self.storage.as_ptr()).len())
             }
         }
     }
@@ -274,7 +285,7 @@ impl<T> TypedArena<T> {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE / elem_size;
@@ -382,7 +393,7 @@ impl DroplessArena {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE;
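
The ownership pattern the second patch switches to can be seen in isolation in the following minimal, stand-alone sketch. It is not part of the patch: the `RawChunk` type and the `main` driver are illustrative only, and it uses stable APIs instead of `Box::new_uninit_slice`. A `Box` asserts unique ownership, so under Stacked Borrows moving or re-borrowing it can invalidate raw pointers previously derived from its contents; a raw `NonNull` field makes no such assertion. The allocation is turned back into a `Box` only in `Drop`, mirroring the `Box::into_raw`/`Box::from_raw` pair in the patch, and the sketch can be checked with `cargo miri run`.

use std::ptr::NonNull;

/// Owns a heap-allocated buffer through a raw pointer, like the patched `ArenaChunk`.
struct RawChunk {
    storage: NonNull<[u8]>,
}

impl RawChunk {
    fn new(capacity: usize) -> RawChunk {
        // Move ownership out of the Box into a raw pointer; Drop must free it.
        let boxed: Box<[u8]> = vec![0u8; capacity].into_boxed_slice();
        RawChunk { storage: NonNull::new(Box::into_raw(boxed)).unwrap() }
    }

    // Hand out a pointer into the buffer, derived from the stored raw pointer
    // rather than through a `&mut` reborrow of a Box.
    fn start(&mut self) -> *mut u8 {
        self.storage.as_ptr() as *mut u8
    }
}

impl Drop for RawChunk {
    fn drop(&mut self) {
        // SAFETY: `storage` came from `Box::into_raw` and is freed exactly once.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let mut chunk = RawChunk::new(16);
    let p = chunk.start();
    unsafe { p.write(42) };
    // Moving the owning struct (as a Vec of chunks does when it grows) leaves `p`
    // usable, since only a raw pointer is moved; with a `Box` field, Stacked Borrows
    // could treat the move as reasserting unique ownership and invalidate `p`.
    let moved = chunk;
    assert_eq!(unsafe { p.read() }, 42);
    drop(moved);
}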