Make SyncDroplessArena allocations thread-local in the fast path
Zoxc committed Jun 15, 2019
1 parent: f1961bc · commit: 512a1cd
Showing 1 changed file with 132 additions and 43 deletions.
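Before this change, every allocation in `SyncDroplessArena` went through a single `MTLock<DroplessArena>`. After it, each worker thread owns a `CurrentChunk` bump pointer via `WorkerLocal`, so the common case is a plain thread-local pointer bump; only the slow path — `grow()`, which starts or extends a chunk — takes the `Lock` around the shared chunk lists. Below is a minimal, self-contained sketch of that split, not the patch itself: the names `CHUNK`, `CURSOR`, `CHUNKS`, and `alloc` are illustrative, and std's `thread_local!` and `Mutex` stand in for rustc's internal `WorkerLocal` and `Lock`.

use std::cell::Cell;
use std::sync::Mutex;

const CHUNK: usize = 4096;

thread_local! {
    // Fast-path state, private to this thread: (index of the chunk being
    // filled, bytes of it already used). It starts "exhausted" so that the
    // first allocation takes the slow path.
    static CURSOR: Cell<(usize, usize)> = Cell::new((usize::MAX, CHUNK));
}

// Slow-path state: every chunk allocated so far, shared between threads.
static CHUNKS: Mutex<Vec<Box<[u8; CHUNK]>>> = Mutex::new(Vec::new());

// Reserves `n` bytes, returning (chunk index, offset). No lock is taken
// unless the calling thread's current chunk is out of space.
fn alloc(n: usize) -> (usize, usize) {
    assert!(n != 0 && n <= CHUNK);
    CURSOR.with(|cursor| {
        let (chunk, used) = cursor.get();
        if used + n <= CHUNK {
            cursor.set((chunk, used + n)); // fast path: thread-local bump
            (chunk, used)
        } else {
            // Slow path: lock the shared list and start a fresh chunk.
            let mut chunks = CHUNKS.lock().unwrap();
            chunks.push(Box::new([0u8; CHUNK]));
            cursor.set((chunks.len() - 1, n));
            (chunks.len() - 1, 0)
        }
    })
}

Starting the cursor in an exhausted state plays the same role as the zeroed `ptr`/`end` in the patch's `CurrentChunk::default()`: the first allocation on each thread is forced onto the slow path, which sets up a real chunk.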
src/libarena/lib.rs: 175 changes (132 additions & 43 deletions)
@@ -25,7 +25,7 @@
extern crate alloc;

use rustc_data_structures::cold_path;
-use rustc_data_structures::sync::MTLock;
+use rustc_data_structures::sync::{SharedWorkerLocal, WorkerLocal, Lock};
use smallvec::SmallVec;

use std::cell::{Cell, RefCell};
@@ -123,11 +123,6 @@ impl<T> Default for TypedArena<T> {
}

impl<T> TypedArena<T> {
-    pub fn in_arena(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const T as *mut T;
-
-        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
-    }
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
@@ -378,12 +373,6 @@ impl Default for DroplessArena {
}

impl DroplessArena {
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const u8 as *mut u8;
-
-        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
-    }
-
    #[inline]
    fn align(&self, align: usize) {
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
@@ -555,64 +544,164 @@ impl DroplessArena {
    }
}

-#[derive(Default)]
-// FIXME(@Zoxc): this type is entirely unused in rustc
-pub struct SyncTypedArena<T> {
-    lock: MTLock<TypedArena<T>>,
-}
-
-impl<T> SyncTypedArena<T> {
-    #[inline(always)]
-    pub fn alloc(&self, object: T) -> &mut T {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
-    }
-
-    #[inline(always)]
-    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
-    where
-        T: Copy,
-    {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
-    }
-
-    #[inline(always)]
-    pub fn clear(&mut self) {
-        self.lock.get_mut().clear();
-    }
-}
-
-#[derive(Default)]
-pub struct SyncDroplessArena {
-    lock: MTLock<DroplessArena>,
-}
-
-impl SyncDroplessArena {
-    #[inline(always)]
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        self.lock.lock().in_arena(ptr)
-    }
-
-    #[inline(always)]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) }
-    }
-
-    #[inline(always)]
-    pub fn alloc<T>(&self, object: T) -> &mut T {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
-    }
-
-    #[inline(always)]
-    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
-    where
-        T: Copy,
-    {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
-    }
-}
+struct CurrentChunk<T> {
+    /// A pointer to the next object to be allocated.
+    ptr: Cell<*mut T>,
+
+    /// A pointer to the end of the allocated area. When this pointer is
+    /// reached, a new chunk is allocated.
+    end: Cell<*mut T>,
+}
+
+impl<T> Default for CurrentChunk<T> {
+    #[inline]
+    fn default() -> Self {
+        CurrentChunk {
+            // We set both `ptr` and `end` to 0 so that the first call to
+            // alloc() will trigger a grow().
+            ptr: Cell::new(0 as *mut T),
+            end: Cell::new(0 as *mut T),
+        }
+    }
+}
+
+impl<T> CurrentChunk<T> {
+    #[inline]
+    fn align(&self, align: usize) {
+        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
+        self.ptr.set(final_address as *mut T);
+        assert!(self.ptr <= self.end);
+    }
+
+    /// Grows the arena.
+    #[inline(always)]
+    fn grow(&self, n: usize, chunks: &mut Vec<TypedArenaChunk<T>>) {
+        unsafe {
+            let (chunk, mut new_capacity);
+            if let Some(last_chunk) = chunks.last_mut() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                last_chunk.entries = currently_used_cap;
+                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
+                    self.end.set(last_chunk.end());
+                    return;
+                } else {
+                    new_capacity = last_chunk.storage.cap();
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= currently_used_cap + n {
+                            break;
+                        }
+                    }
+                }
+            } else {
+                let elem_size = cmp::max(1, mem::size_of::<T>());
+                new_capacity = cmp::max(n, PAGE / elem_size);
+            }
+            chunk = TypedArenaChunk::<T>::new(new_capacity);
+            self.ptr.set(chunk.start());
+            self.end.set(chunk.end());
+            chunks.push(chunk);
+        }
+    }
+}
+
+pub struct SyncDroplessArena {
+    /// Pointers to the current chunk
+    current: WorkerLocal<CurrentChunk<u8>>,
+
+    /// A vector of arena chunks.
+    chunks: Lock<SharedWorkerLocal<Vec<TypedArenaChunk<u8>>>>,
+}
+
+impl Default for SyncDroplessArena {
+    #[inline]
+    fn default() -> SyncDroplessArena {
+        SyncDroplessArena {
+            current: WorkerLocal::new(|_| CurrentChunk::default()),
+            chunks: Default::default(),
+        }
+    }
+}
+
+impl SyncDroplessArena {
+    #[inline(always)]
+    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+        let ptr = ptr as *const u8 as *mut u8;
+
+        self.chunks.lock().iter().any(|chunks| chunks.iter().any(|chunk| {
+            chunk.start() <= ptr && ptr < chunk.end()
+        }))
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow(&self, needed_bytes: usize) {
+        self.current.grow(needed_bytes, &mut **self.chunks.lock());
+    }
+
+    #[inline]
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+        unsafe {
+            assert!(bytes != 0);
+
+            let current = &*self.current;
+
+            current.align(align);
+
+            let future_end = intrinsics::arith_offset(current.ptr.get(), bytes as isize);
+            if (future_end as *mut u8) >= current.end.get() {
+                self.grow(bytes);
+            }
+
+            let ptr = current.ptr.get();
+            // Set the pointer past ourselves
+            current.ptr.set(
+                intrinsics::arith_offset(current.ptr.get(), bytes as isize) as *mut u8,
+            );
+            slice::from_raw_parts_mut(ptr, bytes)
+        }
+    }
+
+    #[inline]
+    pub fn alloc<T>(&self, object: T) -> &mut T {
+        assert!(!mem::needs_drop::<T>());
+
+        let mem = self.alloc_raw(
+            mem::size_of::<T>(),
+            mem::align_of::<T>()) as *mut _ as *mut T;
+
+        unsafe {
+            // Write into uninitialized memory.
+            ptr::write(mem, object);
+            &mut *mem
+        }
+    }
+
+    /// Allocates a slice of objects that are copied into the `SyncDroplessArena`, returning a
+    /// mutable reference to it. Will panic if passed a zero-sized type.
+    ///
+    /// Panics:
+    ///
+    ///  - Zero-sized types
+    ///  - Zero-length slices
+    #[inline]
+    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
+    where
+        T: Copy,
+    {
+        assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!slice.is_empty());
+
+        let mem = self.alloc_raw(
+            slice.len() * mem::size_of::<T>(),
+            mem::align_of::<T>()) as *mut _ as *mut T;
+
+        unsafe {
+            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
+            arena_slice.copy_from_slice(slice);
+            arena_slice
+        }
+    }
+}
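How a caller sees the new API, as a hypothetical example assuming the types above are in scope: allocation returns plain `&mut` references whose backing storage lives as long as the arena.

fn demo(arena: &SyncDroplessArena) {
    // Both allocations bump the calling worker's CurrentChunk; the Lock
    // around the chunk lists is taken only if grow() has to run.
    let x: &mut usize = arena.alloc(42);
    let bytes: &mut [u8] = arena.alloc_slice(b"hello");

    assert_eq!(*x, 42);
    assert_eq!(bytes, &b"hello"[..]);
    // in_arena walks every worker's chunk list under the lock.
    assert!(arena.in_arena(x as *const usize));
}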

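`CurrentChunk::align` (like `DroplessArena::align` before it) rounds the bump pointer up with the usual power-of-two trick: add `align - 1`, then mask off the low bits. Pulled out for illustration; `align_up` is not a function in this patch.

/// Rounds `addr` up to the next multiple of `align`, which must be a
/// power of two; the same expression used by `CurrentChunk::align`.
fn align_up(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (addr + align - 1) & !(align - 1)
}

// align_up(13, 8) == 16: 13 + 7 = 20 = 0b10100, and masking with
// !(8 - 1) = !0b111 clears the low three bits, leaving 0b10000 = 16.
// align_up(16, 8) == 16: already-aligned addresses are unchanged.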

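When a chunk runs out, `CurrentChunk::grow` first tries `reserve_in_place` on the last chunk; failing that, it doubles the previous capacity until there is room for what is already used plus the `n` new entries (a fresh arena starts at `PAGE / elem_size`). The doubling rule, extracted for illustration — `next_capacity` is hypothetical, not part of the patch:

/// Doubles `last_cap` until it can hold `used + n` entries, mirroring the
/// loop in `CurrentChunk::grow`.
fn next_capacity(last_cap: usize, used: usize, n: usize) -> usize {
    let mut new_capacity = last_cap;
    loop {
        new_capacity = new_capacity.checked_mul(2).unwrap();
        if new_capacity >= used + n {
            return new_capacity;
        }
    }
}

// A 4096-entry chunk with 4000 entries used and 300 more requested
// doubles once: next_capacity(4096, 4000, 300) == 8192.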
