Eliminate extensions allocations
Signed-off-by: Alex Saveau <[email protected]>
SUPERCILEX committed Jan 1, 2024
1 parent bac2508 commit 2c83f67
Showing 2 changed files with 161 additions and 28 deletions.
1 change: 1 addition & 0 deletions tracing-subscriber/src/lib.rs
@@ -171,6 +171,7 @@
 // "needless".
 #![allow(clippy::needless_update)]
 #![cfg_attr(not(feature = "std"), no_std)]
+#![cfg_attr(nightly, feature(ptr_mask, strict_provenance))]

 #[cfg(feature = "alloc")]
 extern crate alloc;
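Aside: the nightly gate added above exists because this change relies on pointer tagging. Every extension value gets wrapped in a 2-byte-aligned type, so the low bit of its box address is always zero and can be borrowed as a "value is live" flag (nightly uses the `ptr_mask`/strict-provenance APIs, stable falls back to integer casts). A standalone sketch of that trick, with illustrative names rather than the crate's API and with deallocation left out:

    use std::ptr::NonNull;

    /// Any heap allocation of this type has a zero low address bit to spare.
    #[repr(align(2))]
    struct Aligned<T>(T);

    /// Untyped pointer whose low bit records whether the value is live.
    struct Tagged(NonNull<()>);

    impl Tagged {
        fn new<T>(val: T) -> Self {
            let ptr = Box::into_raw(Box::new(Aligned(val)));
            // Freshly boxed, so bit 0 is currently 0; mark the value live.
            let mut this = Tagged(unsafe { NonNull::new_unchecked(ptr) }.cast());
            this.set_live(true);
            this
        }

        fn live(&self) -> bool {
            (self.0.as_ptr() as usize) & 1 == 1
        }

        fn set_live(&mut self, live: bool) {
            let addr = self.0.as_ptr() as usize;
            let addr = if live { addr | 1 } else { addr & !1 };
            self.0 = unsafe { NonNull::new_unchecked(addr as *mut ()) };
        }

        fn data<T>(&self) -> *mut Aligned<T> {
            // Strip the tag bit before using the address as a real pointer.
            ((self.0.as_ptr() as usize) & !1) as *mut Aligned<T>
        }
    }

The diff below applies the same idea to the registry's extensions map so that clearing it can keep allocations around for reuse.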
188 changes: 160 additions & 28 deletions tracing-subscriber/src/registry/extensions.rs
@@ -1,15 +1,134 @@
 // taken from https://github.com/hyperium/http/blob/master/src/extensions.rs.

-use crate::sync::{RwLockReadGuard, RwLockWriteGuard};
 use std::{
-    any::{Any, TypeId},
-    collections::HashMap,
+    any::TypeId,
+    collections::{hash_map::Entry, HashMap},
     fmt,
     hash::{BuildHasherDefault, Hasher},
+    mem, ptr,
 };

+use crate::registry::extensions::boxed_entry::{Aligned, BoxedEntry, BoxedEntryOp};
+use crate::sync::{RwLockReadGuard, RwLockWriteGuard};
+
+#[allow(unreachable_pub)]
+mod boxed_entry {
+    use std::mem::{transmute, MaybeUninit};
+    use std::ptr;
+    use std::ptr::NonNull;
+
+    /// Used to give ourselves one free bit for liveness checks.
+    #[repr(align(2))]
+    pub struct Aligned<T>(pub T);
+
+    /// Wrapper around an untyped pointer to avoid casting bugs.
+    ///
+    /// This type holds the pointer to a `Box<Aligned<T>>`.
+    #[derive(Copy, Clone)]
+    pub struct ExtPtr(NonNull<()>);
+
+    impl ExtPtr {
+        fn new<T>(val: T) -> Self {
+            let mut this = {
+                let ptr = Box::into_raw(Box::new(Aligned(val)));
+                Self(unsafe { NonNull::new_unchecked(ptr) }.cast())
+            };
+            this.set_live(true);
+            this
+        }
+
+        pub fn typed<T>(self) -> NonNull<Aligned<T>> {
+            #[cfg(not(nightly))]
+            let ptr = {
+                let addr = self.0.as_ptr() as usize;
+                let addr = addr & !1; // Zero out the live bit
+                addr as *mut _
+            };
+            #[cfg(nightly)]
+            let ptr = self.0.as_ptr().mask(!1).cast();
+            unsafe { NonNull::new_unchecked(ptr) }
+        }
+
+        pub fn live(&self) -> bool {
+            #[cfg(not(nightly))]
+            let addr = self.0.as_ptr() as usize;
+            #[cfg(nightly)]
+            let addr = self.0.as_ptr().addr();
+            (addr & 1) == 1
+        }
+
+        pub fn set_live(&mut self, live: bool) {
+            let update = |addr: usize| if live { addr | 1 } else { addr & !1 };
+            #[cfg(not(nightly))]
+            let ptr = update(self.0.as_ptr() as usize) as *mut _;
+            #[cfg(nightly)]
+            let ptr = self.0.as_ptr().map_addr(update);
+            self.0 = unsafe { NonNull::new_unchecked(ptr) };
+        }
+    }
+
+    /// Extension storage
+    pub struct BoxedEntry {
+        pub ptr: ExtPtr,
+        op_fn: NonNull<unsafe fn(ExtPtr, BoxedEntryOp)>,
+    }
+
+    unsafe impl Send for BoxedEntry {}
+
+    unsafe impl Sync for BoxedEntry {}
+
+    pub enum BoxedEntryOp {
+        DropLiveValue,
+        Drop,
+    }
+
+    unsafe fn run_boxed_entry_op<T>(ext: ExtPtr, op: BoxedEntryOp) {
+        let ptr = ext.typed::<T>().as_ptr();
+        match op {
+            BoxedEntryOp::DropLiveValue => unsafe {
+                ptr::drop_in_place(ptr);
+            },
+            BoxedEntryOp::Drop => {
+                if ext.live() {
+                    unsafe {
+                        drop(Box::from_raw(ptr));
+                    }
+                } else {
+                    unsafe {
+                        drop(Box::from_raw(ptr.cast::<MaybeUninit<Aligned<T>>>()));
+                    }
+                }
+            }
+        }
+    }
+
+    impl BoxedEntry {
+        pub fn new<T: Send + Sync + 'static>(val: T) -> Self {
+            Self {
+                ptr: ExtPtr::new(val),
+                op_fn: {
+                    let fn_ptr = run_boxed_entry_op::<T> as *mut _;
+                    unsafe { NonNull::new_unchecked(fn_ptr) }
+                },
+            }
+        }
+
+        pub fn op_fn(&self) -> unsafe fn(ExtPtr, BoxedEntryOp) {
+            unsafe { transmute(self.op_fn.as_ptr()) }
+        }
+    }
+
+    impl Drop for BoxedEntry {
+        fn drop(&mut self) {
+            unsafe {
+                self.op_fn()(self.ptr, BoxedEntryOp::Drop);
+            }
+        }
+    }
+}
+
-#[allow(warnings)]
-type AnyMap = HashMap<TypeId, Box<dyn Any + Send + Sync>, BuildHasherDefault<IdHasher>>;
+type AnyMap = HashMap<TypeId, BoxedEntry, BuildHasherDefault<IdHasher>>;

 /// With TypeIds as keys, there's no need to hash them. They are already hashes
 /// themselves, coming from the compiler. The IdHasher holds the u64 of
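For illustration, a hasher along the lines described in the comment above does no real hashing at all: it records the single 64-bit value it is given and returns it unchanged. A minimal sketch of that pattern (hypothetical `NoopIdHasher`; the crate's actual `IdHasher` lives in the collapsed lines that follow):

    use std::hash::{BuildHasherDefault, Hasher};

    /// Passes the already-unique TypeId value straight through.
    #[derive(Default)]
    struct NoopIdHasher(u64);

    impl Hasher for NoopIdHasher {
        fn write(&mut self, _: &[u8]) {
            unreachable!("TypeId keys are hashed via write_u64");
        }

        fn write_u64(&mut self, id: u64) {
            self.0 = id;
        }

        fn finish(&self) -> u64 {
            self.0
        }
    }

    // Usable as the map's hasher via BuildHasherDefault, e.g.:
    type Map<V> = std::collections::HashMap<std::any::TypeId, V, BuildHasherDefault<NoopIdHasher>>;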
Expand Down Expand Up @@ -135,46 +254,54 @@ impl ExtensionsInner {
     /// If a extension of this type already existed, it will
     /// be returned.
     pub(crate) fn insert<T: Send + Sync + 'static>(&mut self, val: T) -> Option<T> {
-        self.map
-            .insert(TypeId::of::<T>(), Box::new(val))
-            .and_then(|boxed| {
-                #[allow(warnings)]
-                {
-                    (boxed as Box<dyn Any + 'static>)
-                        .downcast()
-                        .ok()
-                        .map(|boxed| *boxed)
+        match self.map.entry(TypeId::of::<T>()) {
+            Entry::Occupied(mut entry) => {
+                let entry = entry.get_mut();
+                let mut ptr = entry.ptr.typed::<T>();
+                if entry.ptr.live() {
+                    Some(mem::replace(unsafe { ptr.as_mut() }, Aligned(val)).0)
+                } else {
+                    unsafe {
+                        ptr::write(ptr.as_ptr(), Aligned(val));
+                    }
+                    entry.ptr.set_live(true);
+                    None
                 }
-            })
+            }
+            Entry::Vacant(entry) => {
+                entry.insert(BoxedEntry::new(val));
+                None
+            }
+        }
     }

     /// Get a reference to a type previously inserted on this `Extensions`.
     pub(crate) fn get<T: 'static>(&self) -> Option<&T> {
         self.map
             .get(&TypeId::of::<T>())
-            .and_then(|boxed| (&**boxed as &(dyn Any + 'static)).downcast_ref())
+            .filter(|boxed| boxed.ptr.live())
+            .map(|boxed| &unsafe { boxed.ptr.typed::<T>().as_ref() }.0)
     }

     /// Get a mutable reference to a type previously inserted on this `Extensions`.
     pub(crate) fn get_mut<T: 'static>(&mut self) -> Option<&mut T> {
         self.map
             .get_mut(&TypeId::of::<T>())
-            .and_then(|boxed| (&mut **boxed as &mut (dyn Any + 'static)).downcast_mut())
+            .filter(|boxed| boxed.ptr.live())
+            .map(|boxed| &mut unsafe { boxed.ptr.typed::<T>().as_mut() }.0)
     }

     /// Remove a type from this `Extensions`.
     ///
     /// If a extension of this type existed, it will be returned.
     pub(crate) fn remove<T: Send + Sync + 'static>(&mut self) -> Option<T> {
-        self.map.remove(&TypeId::of::<T>()).and_then(|boxed| {
-            #[allow(warnings)]
-            {
-                (boxed as Box<dyn Any + 'static>)
-                    .downcast()
-                    .ok()
-                    .map(|boxed| *boxed)
-            }
-        })
+        self.map
+            .get_mut(&TypeId::of::<T>())
+            .filter(|boxed| boxed.ptr.live())
+            .map(|boxed| {
+                boxed.ptr.set_live(false);
+                unsafe { ptr::read(boxed.ptr.typed::<T>().as_ptr()) }.0
+            })
     }

     /// Clear the `ExtensionsInner` in-place, dropping any elements in the map but
@@ -184,7 +311,12 @@ impl ExtensionsInner {
     /// that future spans will not need to allocate new hashmaps.
     #[cfg(any(test, feature = "registry"))]
     pub(crate) fn clear(&mut self) {
-        self.map.clear();
+        for boxed in self.map.values_mut().filter(|boxed| boxed.ptr.live()) {
+            boxed.ptr.set_live(false);
+            unsafe {
+                boxed.op_fn()(boxed.ptr, BoxedEntryOp::DropLiveValue);
+            }
+        }
     }
 }

@@ -234,8 +366,8 @@ mod tests {

         assert_eq!(
             extensions.map.len(),
-            0,
-            "after clear(), extensions map should have length 0"
+            3,
+            "after clear(), extensions map should have length 3"
         );
         assert_eq!(
             extensions.map.capacity(),
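The updated test above now expects a `len()` of 3 after `clear()`: entries stay in the map, merely marked dead, so their boxes and the map's capacity can be reused by later spans. The piece that makes dropping those type-erased values possible is the per-entry function pointer recorded by `BoxedEntry::new`. A rough standalone sketch of that pattern, with hypothetical names and the liveness/reuse bookkeeping left out:

    use std::ptr::NonNull;

    /// A value of erased type plus the one operation still needed: freeing it.
    /// (The code above additionally stores an op enum and a live bit so it can
    /// also drop the value in place and reuse the allocation.)
    struct Erased {
        ptr: NonNull<()>,                // points at a leaked Box<T>
        drop_fn: unsafe fn(NonNull<()>), // monomorphized for T at creation time
    }

    unsafe fn drop_box<T>(ptr: NonNull<()>) {
        // Reconstitute the Box<T> and let it drop normally.
        drop(unsafe { Box::from_raw(ptr.cast::<T>().as_ptr()) });
    }

    impl Erased {
        fn new<T>(val: T) -> Self {
            Self {
                ptr: NonNull::from(Box::leak(Box::new(val))).cast(),
                drop_fn: drop_box::<T>,
            }
        }
    }

    impl Drop for Erased {
        fn drop(&mut self) {
            unsafe { (self.drop_fn)(self.ptr) }
        }
    }

    fn main() {
        // The String is freed with its real destructor even though `Erased`
        // never names the type again after construction.
        let _e = Erased::new(String::from("hello"));
    }

In the committed code this role is played by `run_boxed_entry_op` and `BoxedEntry::op_fn`.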
