Fix unsoundness in TLS
The API was unsound because it allowed a reference to thread-local data to leak to another thread. We solve this in a manner similar to libstd: a `with` method takes a closure and temporarily gives access through a reference, so the reference cannot outlive the call. Related to [RFC 1705](rust-lang/rfcs#1705).
ticki committed Aug 7, 2016
1 parent cdec4b0 commit 863a1ec
Showing 9 changed files with 130 additions and 136 deletions.
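
To make the shape of the fix concrete, below is a minimal sketch of the two access patterns, using a simplified `Key<T>` wrapper rather than the crate's actual `tls::Key`: the closure-based accessor confines the borrow to the call, mirroring `std::thread::LocalKey::with`, while the removed accessor handed out a reference that could outlive the thread's storage.

// A simplified stand-in for a TLS key; in the real crate this wraps a
// `#[thread_local]` static, but a plain field is enough to show the signatures.
pub struct Key<T: 'static> {
    value: T,
}

impl<T: 'static> Key<T> {
    // Sound: the reference passed to `f` cannot outlive this call, so it can
    // never be stashed away or handed to another thread.
    pub fn with<F, R>(&self, f: F) -> R
    where
        F: FnOnce(&T) -> R,
    {
        f(&self.value)
    }
}

// The removed accessor had roughly the shape `fn get(&self) -> &'static T`,
// which let callers keep the reference after the thread-local storage was gone.
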
96 changes: 43 additions & 53 deletions src/allocator.rs
@@ -4,27 +4,29 @@

use prelude::*;

use core::{ops, mem, ptr};
use core::{mem, ops};

use {brk, tls, sys};
use {brk, tls};
use bookkeeper::{self, Bookkeeper, Allocator};
use sync::Mutex;

/// Alias for the wrapper type of the thread-local variable holding the local allocator.
type ThreadLocalAllocator = MoveCell<LazyInit<fn() -> LocalAllocator, LocalAllocator>>;

/// The global default allocator.
// TODO remove these filthy function pointers.
static GLOBAL_ALLOCATOR: Mutex<LazyInit<fn() -> GlobalAllocator, GlobalAllocator>> =
Mutex::new(LazyInit::new(global_init));
tls! {
/// The thread-local allocator.
static THREAD_ALLOCATOR: MoveCell<LazyInit<fn() -> LocalAllocator, LocalAllocator>> =
MoveCell::new(LazyInit::new(local_init));
static THREAD_ALLOCATOR: ThreadLocalAllocator = MoveCell::new(LazyInit::new(local_init));
}

/// Initialize the global allocator.
fn global_init() -> GlobalAllocator {
// The initial acquired segment.
let (aligner, initial_segment, excessive) =
brk::get(bookkeeper::EXTRA_ELEMENTS * 4, mem::align_of::<Block>());
brk::get(4 * bookkeeper::EXTRA_ELEMENTS * mem::size_of::<Block>(), mem::align_of::<Block>());

// Initialize the new allocator.
let mut res = GlobalAllocator {
@@ -42,48 +44,63 @@ fn global_init() -> GlobalAllocator {

/// Initialize the local allocator.
fn local_init() -> LocalAllocator {
/// The destructor of the local allocator.
///
/// This will simply free everything to the global allocator.
extern fn dtor(alloc: &ThreadLocalAllocator) {
// This is important! If we simply moved out of the reference, another dtor could use the
// value after it had been freed. Thus we replace it with the `Unreachable` state, meaning
// that any request will result in a panic.
let alloc = alloc.replace(LazyInit::unreachable());

// Lock the global allocator.
// TODO dumb borrowck
let mut global_alloc = GLOBAL_ALLOCATOR.lock();
let global_alloc = global_alloc.get();

// TODO: we know this is sorted, so we could exploit that fact for faster insertion into
// the global allocator.

alloc.into_inner().inner.for_each(move |block| global_alloc.free(block));
}

// The initial acquired segment.
let initial_segment = GLOBAL_ALLOCATOR
.lock()
.get()
.alloc(bookkeeper::EXTRA_ELEMENTS * 4, mem::align_of::<Block>());
.alloc(4 * bookkeeper::EXTRA_ELEMENTS * mem::size_of::<Block>(), mem::align_of::<Block>());

unsafe {
// Initialize the new allocator.
let mut res = LocalAllocator {
inner: Bookkeeper::new(Vec::from_raw_parts(initial_segment, 0)),
};
// Attach the allocator to the current thread.
res.attach();
THREAD_ALLOCATOR.register_thread_destructor(dtor).unwrap();

res
LocalAllocator {
inner: Bookkeeper::new(Vec::from_raw_parts(initial_segment, 0)),
}
}
}

/// Temporarily get the allocator.
///
/// This is simply to avoid repeating ourselves, so we let this take care of the hairy stuff.
fn get_allocator<T, F: FnOnce(&mut LocalAllocator) -> T>(f: F) -> T {
/// A dummy used as placeholder for the temporary initializer.
fn dummy() -> LocalAllocator {
unreachable!();
}

// Get the thread allocator.
let thread_alloc = THREAD_ALLOCATOR.get();
// Just dump some placeholding initializer in the place of the TLA.
let mut thread_alloc = thread_alloc.replace(LazyInit::new(dummy));
THREAD_ALLOCATOR.with(|thread_alloc| {
// Just put a placeholder initializer in the place of the TLA (thread-local allocator).
let mut thread_alloc_original = thread_alloc.replace(LazyInit::unreachable());

// Call the closure involved.
let res = f(thread_alloc.get());
// Call the closure involved.
let res = f(thread_alloc_original.get());

// Put back the original allocator.
THREAD_ALLOCATOR.get().replace(thread_alloc);
// Put back the original allocator.
thread_alloc.replace(thread_alloc_original);

res
res
})
}

/// Derives `Deref` and `DerefMut` to the `inner` field.
///
/// This requires importing `core::ops`.
macro_rules! derive_deref {
($imp:ty, $target:ty) => {
impl ops::Deref for $imp {
@@ -140,33 +157,6 @@ pub struct LocalAllocator {

derive_deref!(LocalAllocator, Bookkeeper);

impl LocalAllocator {
/// Attach this allocator to the current thread.
///
/// This will make sure this allocator's data is freed to the global allocator when the thread exits.
pub unsafe fn attach(&mut self) {
extern fn dtor(ptr: *mut LocalAllocator) {
let alloc = unsafe { ptr::read(ptr) };

// Lock the global allocator.
// TODO dumb borrowck
let mut global_alloc = GLOBAL_ALLOCATOR.lock();
let global_alloc = global_alloc.get();

// Gotta' make sure no memleaks are here.
#[cfg(feature = "debug_tools")]
alloc.assert_no_leak();

// TODO: we know this is sorted, so we could exploit that fact for faster insertion into
// the global allocator.

alloc.inner.for_each(move |block| global_alloc.free(block));
}

sys::register_thread_destructor(self as *mut LocalAllocator, dtor).unwrap();
}
}

impl Allocator for LocalAllocator {
#[inline]
fn alloc_fresh(&mut self, size: usize, align: usize) -> Block {
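
The `get_allocator` change above relies on a take-and-restore pattern: the allocator is moved out of the thread-local cell, a poisoned `LazyInit::unreachable()` placeholder is left behind, and the original value is put back once the closure returns. Below is a minimal sketch of that pattern, with a `Cell<Option<T>>` from std standing in for `MoveCell<LazyInit<..>>` (the crate itself is `no_std`):

use std::cell::Cell;

// Move the value out of `slot` for the duration of `f`, leaving a placeholder
// (`None`) behind so that any reentrant access fails loudly instead of aliasing.
fn with_taken<T, R, F: FnOnce(&mut T) -> R>(slot: &Cell<Option<T>>, f: F) -> R {
    // Take the value, poisoning the slot.
    let mut taken = slot.replace(None).expect("reentrant access");

    // Run the caller's closure against the temporarily owned value.
    let res = f(&mut taken);

    // Restore the original value for the next caller.
    slot.set(Some(taken));

    res
}

fn main() {
    let slot = Cell::new(Some(1u32));
    let doubled = with_taken(&slot, |x| { *x += 1; *x * 2 });
    assert_eq!(doubled, 4);
    assert_eq!(slot.into_inner(), Some(2));
}
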
28 changes: 10 additions & 18 deletions src/bookkeeper.rs
@@ -2,9 +2,8 @@

use prelude::*;

use core::marker::PhantomData;
use core::ops::Range;
use core::{ptr, cmp, mem, ops};
use core::{ptr, mem, ops};

/// Elements required _more_ than the length as capacity.
///
@@ -38,8 +37,6 @@ pub struct Bookkeeper {
/// These are **not** invariants: If these assumptions are not held, it will simply act strange
/// (e.g. logic bugs), but not memory unsafety.
pool: Vec<Block>,
/// Is this currently reallocating?
reallocating: bool,
}

impl Bookkeeper {
@@ -49,11 +46,13 @@ impl Bookkeeper {
debug_assert!(vec.capacity() >= EXTRA_ELEMENTS, "Not enough initial capacity of the vector.");
debug_assert!(vec.is_empty(), "Initial vector isn't empty.");

Bookkeeper {
let res = Bookkeeper {
pool: vec,
// Be careful with this!
.. Bookkeeper::default()
}
};

res.check();

res
}

/// Perform a binary search to find the appropriate place where the block can be inserted or is
@@ -173,16 +172,6 @@ impl Bookkeeper {
log!(self.pool, "Check OK!");
}
}

/// Check for memory leaks.
///
/// This will make sure that all the allocated blocks have been freed.
#[cfg(feature = "debug_tools")]
fn assert_no_leak(&self) {
assert!(self.allocated == self.pool.capacity() * mem::size_of::<Block>(), "Not all blocks \
freed. Total allocated space is {} ({} free blocks).", self.allocated,
self.pool.len());
}
}

/// An allocator.
@@ -588,6 +577,9 @@ pub trait Allocator: ops::DerefMut<Target = Bookkeeper> {
// Logging.
log!(self.pool;self.pool.len(), "Pushing {:?}.", block);

// Some assertions...
debug_assert!(self.pool.last().unwrap() <= &block, "Pushing will make the list unsorted.");

// Short-circuit in case of an empty block.
if !block.is_empty() {
// We will try to simply merge it with the last block.
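
The new `debug_assert!` in `push` above guards the sorted-pool invariant: blocks are appended at the end, so a pushed block must not sort before the current last element. A minimal sketch of the same check, with plain integers standing in for `Block`s:

// Append `block` to a pool kept sorted in ascending order, asserting that the
// append does not break the ordering (integers stand in for `Block`s here).
fn push_sorted(pool: &mut Vec<usize>, block: usize) {
    debug_assert!(
        pool.last().map_or(true, |last| *last <= block),
        "Pushing will make the list unsorted."
    );

    pool.push(block);
}

fn main() {
    let mut pool = vec![1, 4, 9];
    push_sorted(&mut pool, 16); // Fine: 16 is not smaller than 9.
    assert_eq!(pool, [1, 4, 9, 16]);
    // push_sorted(&mut pool, 2); // Would trip the debug assertion.
}
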
2 changes: 0 additions & 2 deletions src/cell.rs
@@ -1,5 +1,3 @@
use prelude::*;

use core::cell::UnsafeCell;
use core::mem;

14 changes: 9 additions & 5 deletions src/fail.rs
@@ -37,7 +37,7 @@ fn default_oom_handler() -> ! {
/// The rule of thumb is that this should be called if and only if unwinding (which allocates)
/// will hit the same error.
pub fn oom() -> ! {
if let Some(handler) = THREAD_OOM_HANDLER.get().replace(None) {
if let Some(handler) = THREAD_OOM_HANDLER.with(|x| x.replace(None)) {
// There is a local allocator available.
handler();
} else {
@@ -63,10 +63,13 @@ pub fn set_oom_handler(handler: fn() -> !) {
/// This might panic if a thread OOM handler already exists.
#[inline]
pub fn set_thread_oom_handler(handler: fn() -> !) {
let mut thread_alloc = THREAD_OOM_HANDLER.get();
let out = thread_alloc.replace(Some(handler));
THREAD_OOM_HANDLER.with(|thread_oom| {
// Replace it with the new handler.
let res = thread_oom.replace(Some(handler));

debug_assert!(out.is_none());
// Make sure that it doesn't override another handler.
debug_assert!(res.is_none());
});
}

#[cfg(test)]
@@ -88,14 +91,15 @@ mod test {
#[should_panic]
fn test_panic_thread_oom() {
fn infinite() -> ! {
#[allow(empty_loop)]
loop {}
}
fn panic() -> ! {
panic!("cats are not cute.");
}

set_oom_handler(infinite);
set_thread_oom_handler(infinite);
set_thread_oom_handler(panic);
oom();
}
}
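
A short usage sketch of the thread-local OOM handler shown above; the `ralloc::fail` path and the handler body are illustrative assumptions, since only `src/fail.rs` itself is visible in this diff:

// An OOM handler must diverge; aborting avoids any further allocation.
fn worker_oom() -> ! {
    std::process::abort();
}

fn main() {
    // Install the handler for the current thread. With the new closure-based TLS,
    // a second call on the same thread would trip the debug assertion above.
    ralloc::fail::set_thread_oom_handler(worker_oom);
}
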
51 changes: 38 additions & 13 deletions src/lazy_init.rs
@@ -1,7 +1,5 @@
//! `LazyStatic` like initialization.

use core::{mem, ptr, intrinsics};

/// The initialization state
enum State<F, T> {
/// The data is uninitialized, initialization is pending.
@@ -10,9 +8,14 @@ enum State<F, T> {
Uninitialized(F),
/// The data is initialized, and ready for use.
Initialized(T),
/// The data is unreachable; any access will panic.
Unreachable,
}

/// A lazily initialized container.
///
/// This container starts out simply containing an initializer (i.e., a function to construct the
/// value in question). When the value is requested, the initializer runs.
pub struct LazyInit<F, T> {
/// The internal state.
state: State<F, T>,
@@ -21,29 +24,37 @@ pub struct LazyInit<F, T> {
impl<F: FnMut() -> T, T> LazyInit<F, T> {
/// Create a new to-be-initialized container.
///
/// The closure will be executed when initialization is required.
/// The closure will be executed when initialization is required, and is guaranteed to be
/// executed at most once.
#[inline]
pub const fn new(init: F) -> LazyInit<F, T> {
LazyInit {
state: State::Uninitialized(init),
}
}

/// Create a lazy initializer with unreachable inner data.
///
/// When access is attempted, it will simply panic. This is useful for e.g. temporarily
/// getting ownership.
pub const fn unreachable() -> LazyInit<F, T> {
LazyInit {
state: State::Unreachable,
}
}

/// Get a mutable reference to the inner value.
///
/// If it is uninitialized, it will be initialized and then returned.
#[inline]
pub fn get(&mut self) -> &mut T {
let mut inner;
let inner;

let res = match self.state {
State::Initialized(ref mut x) => {
return x;
},
State::Uninitialized(ref mut f) => {
inner = f();
},
};
match self.state {
State::Initialized(ref mut x) => return x,
State::Uninitialized(ref mut f) => inner = f(),
State::Unreachable => unreachable!(),
}

self.state = State::Initialized(inner);

@@ -54,6 +65,19 @@ impl<F: FnMut() -> T, T> LazyInit<F, T> {
unreachable!();
}
}

/// Get the inner of the container.
///
/// This won't mutate the container itself, since it consumes it. The initializer will (if
/// necessary) be called and the result returned. If already initialized, the inner value will
/// be moved out and returned.
pub fn into_inner(self) -> T {
match self.state {
State::Initialized(x) => x,
State::Uninitialized(mut f) => f(),
State::Unreachable => unreachable!(),
}
}
}

#[cfg(test)]
@@ -71,8 +95,9 @@ mod test {
assert_eq!(*lazy.get(), 400);
}

#[test]
fn test_laziness() {
let mut is_called = Cell::new(false);
let is_called = Cell::new(false);
let mut lazy = LazyInit::new(|| is_called.set(true));
assert!(!is_called.get());
lazy.get();
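
A short usage sketch of the `LazyInit` container diffed above, assuming the type is in scope (it is internal to the crate): the initializer runs on first access, and `into_inner` consumes the container.

fn build() -> Vec<u32> {
    // Stand-in for an expensive computation.
    (0..4).collect()
}

fn demo() {
    let mut lazy = LazyInit::new(build);

    // The first `get` runs `build`; later calls reuse the stored value.
    assert_eq!(lazy.get().len(), 4);
    assert_eq!(lazy.get()[3], 3);

    // `into_inner` moves the value out (initializing first if necessary).
    let v = lazy.into_inner();
    assert_eq!(v, vec![0, 1, 2, 3]);
}
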
4 changes: 2 additions & 2 deletions src/ptr.rs
@@ -103,10 +103,10 @@ mod test {
assert_eq!(**ptr.offset(1), b'b');
}

let mut x = ['a', 'b'];
let mut y = ['a', 'b'];

unsafe {
let ptr = Pointer::new(&mut x[0] as *mut char);
let ptr = Pointer::new(&mut y[0] as *mut char);
assert_eq!(**ptr.clone().cast::<[char; 1]>(), ['a']);
assert_eq!(**ptr.offset(1), 'b');
}
3 changes: 3 additions & 0 deletions src/symbols.rs
@@ -1,5 +1,8 @@
//! Rust allocation symbols.

// TODO remove this, this is a false positive.
#![allow(private_no_mangle_fns)]

use allocator;

/// Rust allocation symbol.