diff --git a/Cargo.toml b/Cargo.toml index 24ff782..c0dc180 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,7 @@ harness = false # trait on stable Rust. Enabling this feature means that `bumpalo` will # implement its `Allocator` trait. allocator-api2 = { version = "0.2.8", default-features = false, optional = true } +memoffset = { version = "0.9.0", optional = true } [dev-dependencies] quickcheck = "1.0.3" @@ -45,6 +46,7 @@ rand = "0.8.5" default = [] collections = [] boxed = [] +pin = ["dep:memoffset"] allocator_api = [] std = [] diff --git a/src/boxed.rs b/src/boxed.rs index af0737c..1b04ce9 100644 --- a/src/boxed.rs +++ b/src/boxed.rs @@ -165,13 +165,6 @@ impl<'a, T> Box<'a, T> { Box(a.alloc(x)) } - /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then - /// `x` will be pinned in memory and unable to be moved. - #[inline(always)] - pub fn pin_in(x: T, a: &'a Bump) -> Pin> { - Box(a.alloc(x)).into() - } - /// Consumes the `Box`, returning the wrapped value. /// /// # Examples @@ -452,18 +445,6 @@ impl<'a, T: ?Sized + Hasher> Hasher for Box<'a, T> { } } -impl<'a, T: ?Sized> From> for Pin> { - /// Converts a `Box` into a `Pin>`. - /// - /// This conversion does not allocate on the heap and happens in place. - fn from(boxed: Box<'a, T>) -> Self { - // It's not possible to move or replace the insides of a `Pin>` - // when `T: !Unpin`, so it's safe to pin it directly without any - // additional requirements. - unsafe { Pin::new_unchecked(boxed) } - } -} - impl<'a> Box<'a, dyn Any> { #[inline] /// Attempt to downcast the box to a concrete type. diff --git a/src/drop.rs b/src/drop.rs new file mode 100644 index 0000000..1fe2a6f --- /dev/null +++ b/src/drop.rs @@ -0,0 +1,110 @@ +use core::{ + cell::{Cell, UnsafeCell}, + marker::PhantomPinned, + mem::MaybeUninit, + ptr::NonNull, +}; + +/// A circular doubly linked list. +#[derive(Debug, Default)] +pub struct DropList { + pub link: Link, +} + +impl DropList { + /// Safety: `self` must be pinned. 
+ #[inline] + pub unsafe fn init(&self) { + let link_ptr = Some(NonNull::from(&self.link)); + self.link.prev.set(link_ptr); + self.link.next.set(link_ptr); + } + + pub unsafe fn insert(&self, node: NonNull) { + insert_after(NonNull::from(&self.link), node) + } + + pub unsafe fn run_drop(&self) { + let mut curr = self.link.next.get().unwrap(); + let end = NonNull::from(&self.link); + while curr != end { + let entry = unsafe { curr.cast::>().as_ref() }; + unsafe { + (entry.drop_fn)(entry.data.assume_init_ref().get()); + } + curr = entry.link.next.get().unwrap(); + } + } +} + +#[inline] +unsafe fn insert_after(tail: NonNull, node_ptr: NonNull) { + let tail = tail.as_ref(); + + let node = node_ptr.as_ref(); + node.prev.set(Some(NonNull::from(tail))); + node.next.set(tail.next.get()); + + tail.next.get().unwrap().as_ref().prev.set(Some(node_ptr)); + tail.next.set(Some(node_ptr)); +} + +#[derive(Debug, Default)] +pub struct Link { + prev: Cell>>, + next: Cell>>, + _marker: PhantomPinned, +} + +impl Link { + pub unsafe fn unlink(&self) { + let Some(prev) = self.prev.take() else { + return; + }; + let next = self.next.take().unwrap(); + prev.as_ref().next.set(Some(next)); + next.as_ref().prev.set(Some(prev)); + } +} + +#[derive(Debug)] +#[repr(C)] +pub struct DropEntry { + link: Link, + drop_fn: unsafe fn(*mut ()), + data: MaybeUninit>, +} + +impl DropEntry { + #[inline] + pub fn new(val: T) -> Self { + Self { + link: Link::default(), + drop_fn: unsafe { + core::mem::transmute::<_, unsafe fn(*mut ())>( + core::ptr::drop_in_place:: as unsafe fn(*mut T), + ) + }, + data: MaybeUninit::new(UnsafeCell::new(val)), + } + } + + #[inline] + pub unsafe fn link_and_data(&self) -> (NonNull, *mut T) { + (NonNull::from(&self.link), self.data.assume_init_ref().get()) + } + + #[inline] + pub unsafe fn ptr_from_data(data: *mut T) -> NonNull> { + NonNull::new_unchecked( + data.byte_sub(memoffset::offset_of!(Self, data)) + .cast::>(), + ) + } + + #[inline] + pub unsafe fn 
link_from_data(data: *mut T) -> NonNull { + let entry = Self::ptr_from_data(data).as_ptr(); + NonNull::new_unchecked(core::ptr::addr_of_mut!((*entry).link)) + } +} diff --git a/src/lib.rs b/src/lib.rs index 4ccc6f0..4a7b8be 100755 --- a/src/lib.rs +++ b/src/lib.rs @@ -11,8 +11,12 @@ pub extern crate alloc as core_alloc; pub mod boxed; #[cfg(feature = "collections")] pub mod collections; +#[cfg(feature = "pin")] +pub mod pin; mod alloc; +#[cfg(feature = "pin")] +mod drop; use core::cell::Cell; use core::fmt::Display; @@ -293,6 +297,8 @@ pub struct Bump { // The current chunk we are bump allocating within. current_chunk_footer: Cell>, allocation_limit: Cell>, + #[cfg(feature = "pin")] + drop_list: Cell<*const drop::DropList>, } #[repr(C)] @@ -386,6 +392,9 @@ impl Default for Bump { impl Drop for Bump { fn drop(&mut self) { unsafe { + #[cfg(feature = "pin")] + self.reset_drop_list(); + dealloc_chunk_list(self.current_chunk_footer.get()); } } @@ -523,6 +532,8 @@ impl Bump { return Ok(Bump { current_chunk_footer: Cell::new(EMPTY_CHUNK.get()), allocation_limit: Cell::new(None), + #[cfg(feature = "pin")] + drop_list: Cell::new(core::ptr::null()), }); } @@ -540,6 +551,8 @@ impl Bump { Ok(Bump { current_chunk_footer: Cell::new(chunk_footer), allocation_limit: Cell::new(None), + #[cfg(feature = "pin")] + drop_list: Cell::new(core::ptr::null()), }) } @@ -747,6 +760,9 @@ impl Bump { return; } + #[cfg(feature = "pin")] + self.reset_drop_list(); + let mut cur_chunk = self.current_chunk_footer.get(); // Deallocate all chunks except the current one @@ -1767,6 +1783,33 @@ impl Bump { } } +#[cfg(feature = "pin")] +impl Bump { + fn drop_list(&self) -> &drop::DropList { + if self.drop_list.get().is_null() { + let drop_list = &*self.alloc(drop::DropList::default()); + unsafe { drop_list.init() }; + self.drop_list.set(drop_list); + } + unsafe { &*self.drop_list.get() } + } + + fn take_drop_list(&mut self) -> Option<&drop::DropList> { + let drop_list_ptr = 
self.drop_list.replace(core::ptr::null()); + if drop_list_ptr.is_null() { + return None; + } + unsafe { Some(&*drop_list_ptr) } + } + + unsafe fn reset_drop_list(&mut self) { + let Some(drop_list) = self.take_drop_list() else { + return; + }; + drop_list.run_drop(); + } +} + /// An iterator over each chunk of allocated memory that /// an arena has bump allocated into. /// diff --git a/src/pin.rs b/src/pin.rs new file mode 100644 index 0000000..1e4ff27 --- /dev/null +++ b/src/pin.rs @@ -0,0 +1,676 @@ +//! A pointer type for bump allocation. +//! +//! [`Box<'a, T>`] provides the simplest form of +//! bump allocation in `bumpalo`. Boxes provide ownership for this allocation, and +//! drop their contents when they go out of scope. +//! +//! # Examples +//! +//! Move a value from the stack to the heap by creating a [`Box`]: +//! +//! ``` +//! use bumpalo::{Bump, pin::Box}; +//! +//! let b = Bump::new(); +//! +//! let val: u8 = 5; +//! let boxed: Box = Box::new_in(val, &b); +//! ``` +//! +//! Move a value from a [`Box`] back to the stack by [dereferencing]: +//! +//! ``` +//! use bumpalo::{Bump, pin::Box}; +//! +//! let b = Bump::new(); +//! +//! let boxed: Box = Box::new_in(5, &b); +//! let val: u8 = *boxed; +//! ``` +//! +//! Running [`Drop`] implementations on bump-allocated values: +//! +//! ``` +//! use bumpalo::{Bump, pin::Box}; +//! use std::sync::atomic::{AtomicUsize, Ordering}; +//! +//! static NUM_DROPPED: AtomicUsize = AtomicUsize::new(0); +//! +//! struct CountDrops; +//! +//! impl Drop for CountDrops { +//! fn drop(&mut self) { +//! NUM_DROPPED.fetch_add(1, Ordering::SeqCst); +//! } +//! } +//! +//! // Create a new bump arena. +//! let bump = Bump::new(); +//! +//! // Create a `CountDrops` inside the bump arena. +//! let mut c = Box::new_in(CountDrops, &bump); +//! +//! // No `CountDrops` have been dropped yet. +//! assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 0); +//! +//! // Drop our `Box`. +//! drop(c); +//! +//! 
// Its `Drop` implementation was run, and so `NUM_DROPPED` has been incremented. +//! assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 1); +//! ``` +//! +//! Creating a recursive data structure: +//! +//! ``` +//! use bumpalo::{Bump, pin::Box}; +//! +//! let b = Bump::new(); +//! +//! #[derive(Debug)] +//! enum List<'a, T> { +//! Cons(T, Box<'a, List<'a, T>>), +//! Nil, +//! } +//! +//! let list: List<i32> = List::Cons(1, Box::new_in(List::Cons(2, Box::new_in(List::Nil, &b)), &b)); +//! println!("{:?}", list); +//! ``` +//! +//! This will print `Cons(1, Cons(2, Nil))`. +//! +//! Recursive structures must be boxed, because if the definition of `Cons` +//! looked like this: +//! +//! ```compile_fail,E0072 +//! # enum List<T> { +//! Cons(T, List<T>), +//! # } +//! ``` +//! +//! It wouldn't work. This is because the size of a `List` depends on how many +//! elements are in the list, and so we don't know how much memory to allocate +//! for a `Cons`. By introducing a [`Box<'a, T>`], which has a defined size, we know how +//! big `Cons` needs to be. +//! +//! # Memory layout +//! +//! For non-zero-sized values, a [`Box`] will use the provided [`Bump`] allocator for +//! its allocation. It is valid to convert both ways between a [`Box`] and a +//! pointer allocated with the [`Bump`] allocator, given that the +//! [`Layout`] used with the allocator is correct for the type. More precisely, +//! a `value: *mut T` that has been allocated with the [`Bump`] allocator +//! with `Layout::for_value(&*value)` may be converted into a box using +//! [`Box::<T>::from_raw(value)`]. Conversely, the memory backing a `value: *mut +//! T` obtained from [`Box::<T>::into_raw`] will be deallocated by the +//! [`Bump`] allocator with [`Layout::for_value(&*value)`]. +//! +//! Note that roundtrip `Box::from_raw(Box::into_raw(b))` loses the lifetime bound to the +//! [`Bump`] immutable borrow which guarantees that the allocator will not be reset +//! and memory will not be freed. +//! 
[dereferencing]: https://doc.rust-lang.org/std/ops/trait.Deref.html +//! [`Box`]: struct.Box.html +//! [`Box<'a, T>`]: struct.Box.html +//! [`Box::<T>::from_raw(value)`]: struct.Box.html#method.from_raw +//! [`Box::<T>::into_raw`]: struct.Box.html#method.into_raw +//! [`Bump`]: ../struct.Bump.html +//! [`Drop`]: https://doc.rust-lang.org/std/ops/trait.Drop.html +//! [`Layout`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html +//! [`Layout::for_value(&*value)`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.for_value + +use core::marker::PhantomData; + +use { + crate::Bump, + { + core::{ + any::Any, + borrow, + cmp::Ordering, + future::Future, + hash::{Hash, Hasher}, + iter::FusedIterator, + mem::ManuallyDrop, + ops::{Deref, DerefMut}, + pin::Pin, + task::{Context, Poll}, + }, + core_alloc::fmt, + }, +}; + +/// An owned pointer to a bump-allocated `T` value, that runs `Drop` +/// implementations. +/// +/// See the [module-level documentation][crate::pin] for more details. +#[repr(transparent)] +pub struct Box<'a, T: ?Sized> +where + // The `'static` bound is required for soundness, since `T` isn't actually owned by `Box` + // but instead owned by the bump allocator. A pinned `Box` is really just a handle and therefore + // `mem::forget`-ting a pinned Box will still cause a value drop when the arena drops. + T: 'static, +{ + ptr: *mut T, + _marker: PhantomData<&'a mut T>, +} + +impl<'a, T> Box<'a, T> { + /// Constructs a new `Box<'a, T>`, registering the value on the arena's drop + /// list so that its `Drop` implementation runs when the box (or the arena) is dropped. + pub fn new_in(x: T, a: &'a Bump) -> Box<'a, T> { + let entry = &*a.alloc(crate::drop::DropEntry::new(x)); + + let (link, data) = unsafe { entry.link_and_data() }; + unsafe { a.drop_list().insert(link) }; + + Box { + ptr: data, + _marker: PhantomData, + } + } + + /// Constructs a new `Pin<Box<'a, T>>`. If `T` does not implement `Unpin`, then + /// `x` will be pinned in memory and unable to be moved. 
+ #[inline] + pub fn pin_in(x: T, a: &'a Bump) -> Pin> { + Box::new_in(x, a).into() + } +} + +impl<'a, T: ?Sized> Box<'a, T> { + #[inline] + unsafe fn release_from_drop_list(&self) { + crate::drop::DropEntry::link_from_data(self.ptr.cast::<()>()) + .as_ref() + .unlink(); + } + + #[inline] + unsafe fn insert_into_drop_list(&self, b: &'a Bump) { + b.drop_list().insert(crate::drop::DropEntry::link_from_data( + self.ptr.cast::<()>(), + )); + } +} + +impl<'a, T: ?Sized> Box<'a, T> { + /// Constructs a box from a raw pointer. + /// + /// After calling this function, the raw pointer is owned by the + /// resulting `Box`. Specifically, the `Box` destructor will call + /// the destructor of `T` and free the allocated memory. For this + /// to be safe, the memory must have been allocated in accordance + /// with the memory layout used by `Box` . + /// + /// # Safety + /// + /// This function is unsafe because improper use may lead to + /// memory problems. For example, a double-free may occur if the + /// function is called twice on the same raw pointer. + /// + /// # Examples + /// + /// Recreate a `Box` which was previously converted to a raw pointer + /// using [`Box::into_raw`]: + /// ``` + /// use bumpalo::{Bump, pin::Box}; + /// + /// let b = Bump::new(); + /// + /// let x = Box::new_in(5, &b); + /// let ptr = Box::into_raw(x); + /// let x = unsafe { Box::from_raw(ptr, &b) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset. + /// ``` + #[inline] + pub unsafe fn from_raw(raw: *mut T, b: &'a Bump) -> Self { + let val = Box { + ptr: raw, + _marker: PhantomData, + }; + unsafe { + val.insert_into_drop_list(b); + } + val + } + + #[inline] + unsafe fn from_raw_with_no_release(raw: *mut T) -> Self { + Box { + ptr: raw, + _marker: PhantomData, + } + } + + /// Consumes the `Box`, returning a wrapped raw pointer. + /// + /// The pointer will be properly aligned and non-null. 
+ /// + /// After calling this function, the caller is responsible for the + /// value previously managed by the `Box`. In particular, the + /// caller should properly destroy `T`. The easiest way to + /// do this is to convert the raw pointer back into a `Box` with the + /// [`Box::from_raw`] function, allowing the `Box` destructor to perform + /// the cleanup. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// Converting the raw pointer back into a `Box` with [`Box::from_raw`] + /// for automatic cleanup: + /// ``` + /// use bumpalo::{Bump, pin::Box}; + /// + /// let b = Bump::new(); + /// + /// let x = Box::new_in(String::from("Hello"), &b); + /// let ptr = Box::into_raw(x); + /// let x = unsafe { Box::from_raw(ptr, &b) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset. + /// ``` + /// Manual cleanup by explicitly running the destructor: + /// ``` + /// use std::ptr; + /// use bumpalo::{Bump, pin::Box}; + /// + /// let b = Bump::new(); + /// + /// let mut x = Box::new_in(String::from("Hello"), &b); + /// let p = Box::into_raw(x); + /// unsafe { + /// ptr::drop_in_place(p); + /// } + /// ``` + #[inline] + pub fn into_raw(b: Box<'a, T>) -> *mut T + where + T: Unpin, + { + unsafe { Box::into_raw_unchecked(b) } + } + + /// Consumes the `Box`, returning a wrapped raw pointer. + /// + /// The pointer will be properly aligned and non-null. + /// + /// After calling this function, the caller is responsible for the + /// value previously managed by the `Box`. In particular, the + /// caller should properly destroy `T`. The easiest way to + /// do this is to convert the raw pointer back into a `Box` with the + /// [`Box::from_raw`] function, allowing the `Box` destructor to perform + /// the cleanup. 
+ /// + /// Note: this is an associated function, which means that you have + /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// Converting the raw pointer back into a `Box` with [`Box::from_raw`] + /// for automatic cleanup: + /// ``` + /// use bumpalo::{Bump, pin::Box}; + /// + /// let b = Bump::new(); + /// + /// let x = Box::new_in(String::from("Hello"), &b); + /// let ptr = unsafe { Box::into_raw_unchecked(x) }; + /// let x = unsafe { Box::from_raw(ptr, &b) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset. + /// ``` + /// Manual cleanup by explicitly running the destructor: + /// ``` + /// use std::ptr; + /// use bumpalo::{Bump, pin::Box}; + /// + /// let b = Bump::new(); + /// + /// let mut x = Box::new_in(String::from("Hello"), &b); + /// let p = unsafe { Box::into_raw_unchecked(x) }; + /// unsafe { + /// ptr::drop_in_place(p); + /// } + /// ``` + #[inline] + pub unsafe fn into_raw_unchecked(b: Box<'a, T>) -> *mut T { + let b = ManuallyDrop::new(b); + b.release_from_drop_list(); + b.ptr + } + + #[inline] + unsafe fn into_raw_with_no_release(b: Box<'a, T>) -> *mut T { + ManuallyDrop::new(b).ptr + } +} + +impl<'a, T: ?Sized> Drop for Box<'a, T> { + fn drop(&mut self) { + unsafe { + self.release_from_drop_list(); + // `Box` owns value of `T`, but not memory behind it. 
+ core::ptr::drop_in_place(self.ptr); + } + } +} + +impl<'a, 'b, T: ?Sized + PartialEq> PartialEq> for Box<'a, T> { + #[inline] + fn eq(&self, other: &Box<'b, T>) -> bool { + PartialEq::eq(&**self, &**other) + } + #[inline] + fn ne(&self, other: &Box<'b, T>) -> bool { + PartialEq::ne(&**self, &**other) + } +} + +impl<'a, 'b, T: ?Sized + PartialOrd> PartialOrd> for Box<'a, T> { + #[inline] + fn partial_cmp(&self, other: &Box<'b, T>) -> Option { + PartialOrd::partial_cmp(&**self, &**other) + } + #[inline] + fn lt(&self, other: &Box<'b, T>) -> bool { + PartialOrd::lt(&**self, &**other) + } + #[inline] + fn le(&self, other: &Box<'b, T>) -> bool { + PartialOrd::le(&**self, &**other) + } + #[inline] + fn ge(&self, other: &Box<'b, T>) -> bool { + PartialOrd::ge(&**self, &**other) + } + #[inline] + fn gt(&self, other: &Box<'b, T>) -> bool { + PartialOrd::gt(&**self, &**other) + } +} + +impl<'a, T: ?Sized + Ord> Ord for Box<'a, T> { + #[inline] + fn cmp(&self, other: &Box<'a, T>) -> Ordering { + Ord::cmp(&**self, &**other) + } +} + +impl<'a, T: ?Sized + Eq> Eq for Box<'a, T> {} + +impl<'a, T: ?Sized + Hash> Hash for Box<'a, T> { + fn hash(&self, state: &mut H) { + (**self).hash(state); + } +} + +impl<'a, T: ?Sized + Hasher> Hasher for Box<'a, T> { + fn finish(&self) -> u64 { + (**self).finish() + } + fn write(&mut self, bytes: &[u8]) { + (**self).write(bytes) + } + fn write_u8(&mut self, i: u8) { + (**self).write_u8(i) + } + fn write_u16(&mut self, i: u16) { + (**self).write_u16(i) + } + fn write_u32(&mut self, i: u32) { + (**self).write_u32(i) + } + fn write_u64(&mut self, i: u64) { + (**self).write_u64(i) + } + fn write_u128(&mut self, i: u128) { + (**self).write_u128(i) + } + fn write_usize(&mut self, i: usize) { + (**self).write_usize(i) + } + fn write_i8(&mut self, i: i8) { + (**self).write_i8(i) + } + fn write_i16(&mut self, i: i16) { + (**self).write_i16(i) + } + fn write_i32(&mut self, i: i32) { + (**self).write_i32(i) + } + fn write_i64(&mut self, i: i64) { + 
(**self).write_i64(i) + } + fn write_i128(&mut self, i: i128) { + (**self).write_i128(i) + } + fn write_isize(&mut self, i: isize) { + (**self).write_isize(i) + } +} + +impl<'a, T: ?Sized> From> for Pin> { + /// Converts a `Box` into a `Pin>`. + /// + /// This conversion does not allocate on the heap and happens in place. + fn from(boxed: Box<'a, T>) -> Self { + // It's not possible to move or replace the insides of a `Pin>` + // when `T: !Unpin`, so it's safe to pin it directly without any + // additional requirements. + unsafe { Pin::new_unchecked(boxed) } + } +} + +impl<'a> Box<'a, dyn Any> { + #[inline] + /// Attempt to downcast the box to a concrete type. + /// + /// # Examples + /// + /// ``` + /// use std::any::Any; + /// + /// fn print_if_string(value: Box) { + /// if let Ok(string) = value.downcast::() { + /// println!("String ({}): {}", string.len(), string); + /// } + /// } + /// + /// let my_string = "Hello World".to_string(); + /// print_if_string(Box::new(my_string)); + /// print_if_string(Box::new(0i8)); + /// ``` + pub fn downcast(self) -> Result, Box<'a, dyn Any>> { + if self.is::() { + unsafe { + let raw: *mut dyn Any = Box::into_raw_with_no_release(self); + Ok(Box::from_raw_with_no_release(raw as *mut T)) + } + } else { + Err(self) + } + } +} + +impl<'a> Box<'a, dyn Any + Send> { + #[inline] + /// Attempt to downcast the box to a concrete type. 
+ /// + /// # Examples + /// + /// ``` + /// use std::any::Any; + /// + /// fn print_if_string(value: Box) { + /// if let Ok(string) = value.downcast::() { + /// println!("String ({}): {}", string.len(), string); + /// } + /// } + /// + /// let my_string = "Hello World".to_string(); + /// print_if_string(Box::new(my_string)); + /// print_if_string(Box::new(0i8)); + /// ``` + pub fn downcast(self) -> Result, Box<'a, dyn Any + Send>> { + if self.is::() { + unsafe { + let raw: *mut (dyn Any + Send) = Box::into_raw_with_no_release(self); + Ok(Box::from_raw_with_no_release(raw as *mut T)) + } + } else { + Err(self) + } + } +} + +impl<'a, T: fmt::Display + ?Sized> fmt::Display for Box<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&**self, f) + } +} + +impl<'a, T: fmt::Debug + ?Sized> fmt::Debug for Box<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&**self, f) + } +} + +impl<'a, T: ?Sized> fmt::Pointer for Box<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // It's not possible to extract the inner Uniq directly from the Box, + // instead we cast it to a *const which aliases the Unique + let ptr: *const T = &**self; + fmt::Pointer::fmt(&ptr, f) + } +} + +impl<'a, T: ?Sized> Deref for Box<'a, T> { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + unsafe { &*self.ptr } + } +} + +impl<'a, T: ?Sized> DerefMut for Box<'a, T> { + #[inline] + fn deref_mut(&mut self) -> &mut T { + unsafe { &mut *self.ptr } + } +} + +impl<'a, I: Iterator + ?Sized> Iterator for Box<'a, I> { + type Item = I::Item; + fn next(&mut self) -> Option { + (**self).next() + } + fn size_hint(&self) -> (usize, Option) { + (**self).size_hint() + } + fn nth(&mut self, n: usize) -> Option { + (**self).nth(n) + } + fn last(self) -> Option { + #[inline] + fn some(_: Option, x: T) -> Option { + Some(x) + } + self.fold(None, some) + } +} + +impl<'a, I: DoubleEndedIterator + ?Sized> 
DoubleEndedIterator for Box<'a, I> { + fn next_back(&mut self) -> Option { + (**self).next_back() + } + fn nth_back(&mut self, n: usize) -> Option { + (**self).nth_back(n) + } +} +impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<'a, I> { + fn len(&self) -> usize { + (**self).len() + } +} + +impl<'a, I: FusedIterator + ?Sized> FusedIterator for Box<'a, I> {} + +impl<'a, T: ?Sized> borrow::Borrow for Box<'a, T> { + fn borrow(&self) -> &T { + self + } +} + +impl<'a, T: ?Sized> borrow::BorrowMut for Box<'a, T> { + fn borrow_mut(&mut self) -> &mut T { + self + } +} + +impl<'a, T: ?Sized> AsRef for Box<'a, T> { + fn as_ref(&self) -> &T { + self + } +} + +impl<'a, T: ?Sized> AsMut for Box<'a, T> { + fn as_mut(&mut self) -> &mut T { + self + } +} + +impl<'a, T: ?Sized> Unpin for Box<'a, T> {} + +impl<'a, F: Future + Unpin> Future for Box<'a, F> { + type Output = F::Output; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + F::poll(Pin::new(&mut *self), cx) + } +} + +impl<'a, R, F> From> for Box<'a, dyn Future> +where + F: Future, +{ + fn from(fut: Box<'a, F>) -> Box<'a, dyn Future> { + unsafe { + let ptr = Box::into_raw_with_no_release(fut) as *mut dyn Future; + Box::from_raw_with_no_release(ptr) + } + } +} + +impl<'a, R, F> From> for Box<'a, dyn Future + Send> +where + F: Future + Send, +{ + fn from(fut: Box<'a, F>) -> Box<'a, dyn Future + Send> { + unsafe { + let ptr = Box::into_raw_with_no_release(fut) as *mut (dyn Future + Send); + Box::from_raw_with_no_release(ptr) + } + } +} + +impl<'a, R, F> From> for Box<'a, dyn Future + Send + Sync> +where + F: Future + Send + Sync, +{ + fn from(fut: Box<'a, F>) -> Box<'a, dyn Future + Send + Sync> { + unsafe { + let ptr = + Box::into_raw_with_no_release(fut) as *mut (dyn Future + Send + Sync); + Box::from_raw_with_no_release(ptr) + } + } +} + +// Maybe put this behind a feature flag: +// +// impl<'a, T: ?Sized + core::marker::Unsize, U: ?Sized> core::ops::CoerceUnsized> +// for Box<'a, T> +// 
{ +// } diff --git a/tests/all/main.rs b/tests/all/main.rs index 2f1ffa8..e540ee6 100755 --- a/tests/all/main.rs +++ b/tests/all/main.rs @@ -8,6 +8,7 @@ mod allocator_api; mod boxed; mod capacity; mod collect_in; +mod pin; mod quickcheck; mod quickchecks; mod string; diff --git a/tests/all/pin.rs b/tests/all/pin.rs new file mode 100644 index 0000000..66de079 --- /dev/null +++ b/tests/all/pin.rs @@ -0,0 +1,190 @@ +#![cfg(feature = "pin")] + +use core::future::Future; +use std::{ + mem, + pin::Pin, + rc::Rc, + sync::{ + atomic::{AtomicBool, AtomicUsize, Ordering}, + Arc, + }, + task::{Context, Poll, Wake, Waker}, +}; + +use bumpalo::pin::Box; +use bumpalo::Bump; + +struct NoopWaker; + +impl Wake for NoopWaker { + fn wake(self: std::sync::Arc) {} +} + +#[test] +fn box_pin() { + let bump = Bump::new(); + let mut fut = Box::pin_in(async { 1 }, &bump); + let fut = fut.as_mut(); + + let waker = Waker::from(Arc::new(NoopWaker)); + let mut context = Context::from_waker(&waker); + + assert_eq!(fut.poll(&mut context), Poll::Ready(1)); +} + +#[test] +fn dyn_box_pin() { + struct Foo(Rc); + impl Drop for Foo { + fn drop(&mut self) { + self.0.store(true, Ordering::SeqCst); + } + } + + let bump = Bump::new(); + let dropped = Rc::new(AtomicBool::new(false)); + + // -- + + let foo = Foo(dropped.clone()); + let fut = Box::new_in( + async move { + mem::forget(foo); + }, + &bump, + ); + let fut: Box<'_, dyn Future> = fut.into(); + drop(fut); + + assert_eq!(dropped.load(Ordering::SeqCst), true); + + // -- + + dropped.store(false, Ordering::SeqCst); + + let fut = Box::new_in(async move { 1 }, &bump); + let fut: Box<'_, dyn Future> = fut.into(); + let mut fut: Pin>> = fut.into(); + let fut = fut.as_mut(); + + let waker = Waker::from(Arc::new(NoopWaker)); + let mut context = Context::from_waker(&waker); + + assert_eq!(fut.poll(&mut context), Poll::Ready(1)); +} + +#[test] +fn box_pin_drop() { + struct Foo(Rc); + impl Drop for Foo { + fn drop(&mut self) { + self.0.store(true, 
Ordering::SeqCst); + } + } + + let dropped = Rc::new(AtomicBool::new(false)); + + let bump = Bump::new(); + let foo = Box::pin_in(Foo(dropped.clone()), &bump); + drop(foo); + + assert!(dropped.load(Ordering::SeqCst)); +} + +#[test] +fn box_pin_mut_drop() { + struct Foo(Rc, String); + impl Drop for Foo { + fn drop(&mut self) { + self.0.store(true, Ordering::SeqCst); + } + } + + let dropped = Rc::new(AtomicBool::new(false)); + + let bump = Bump::new(); + let mut foo = Box::pin_in(Foo(dropped.clone(), String::new()), &bump); + foo.1.push_str("123"); + + drop(foo); + + assert!(dropped.load(Ordering::SeqCst)); +} + +#[test] +fn box_pin_forget_drop() { + struct Foo(Rc); + impl Drop for Foo { + fn drop(&mut self) { + self.0.store(true, Ordering::SeqCst); + } + } + + let dropped = Rc::new(AtomicBool::new(false)); + + let bump = Bump::new(); + mem::forget(Box::pin_in(Foo(dropped.clone()), &bump)); + assert!(!dropped.load(Ordering::SeqCst)); + + drop(bump); + + assert!(dropped.load(Ordering::SeqCst)); +} + +#[test] +fn box_pin_multiple_forget_drop() { + struct Foo(Rc); + impl Drop for Foo { + fn drop(&mut self) { + self.0.fetch_add(1, Ordering::SeqCst); + } + } + + let dropped = Rc::new(AtomicUsize::new(0)); + + let bump = Bump::new(); + mem::forget(Box::pin_in(Foo(dropped.clone()), &bump)); + mem::forget(Box::pin_in(Foo(dropped.clone()), &bump)); + mem::drop(Box::pin_in(Foo(dropped.clone()), &bump)); + mem::forget(Box::pin_in(Foo(dropped.clone()), &bump)); + + assert_eq!(dropped.load(Ordering::SeqCst), 1); + + drop(bump); + + assert_eq!(dropped.load(Ordering::SeqCst), 4); +} + +#[test] +fn box_pin_raw() { + struct Foo(Rc, String); + impl Drop for Foo { + fn drop(&mut self) { + self.0.store(true, Ordering::SeqCst); + } + } + + let dropped = Rc::new(AtomicBool::new(false)); + + let bump = Bump::new(); + let foo = Pin::into_inner(Box::pin_in(Foo(dropped.clone(), String::new()), &bump)); + let ptr = Box::into_raw(foo); + + unsafe { (*ptr).1.push_str("Hello World") }; + + let 
foo = unsafe { Box::from_raw(ptr, &bump) }; + drop(foo); + + assert!(dropped.load(Ordering::SeqCst)); +} + +#[test] +fn into_raw_aliasing() { + let bump = Bump::new(); + let boxed = Box::new_in(1, &bump); + let raw = Box::into_raw(boxed); + + let mut_ref = unsafe { &mut *raw }; + dbg!(mut_ref); +}