Add conversion from Bytes to Vec<u8> (#547)
Signed-off-by: Jiahao XU <[email protected]>
Co-authored-by: Alice Ryhl <[email protected]>
NobodyXu and Darksonn authored Jul 13, 2022
1 parent 10d1f6e commit cd188cb
Showing 5 changed files with 240 additions and 0 deletions.
src/bytes.rs: 89 additions & 0 deletions
@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
    /// fn(data, ptr, len)
    pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
    /// fn(data, ptr, len)
    ///
    /// Consumes the `Bytes`, returning the underlying buffer as a `Vec<u8>`.
    pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
    /// fn(data, ptr, len)
    pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
}

@@ -845,6 +849,13 @@ impl From<String> for Bytes {
    }
}

impl From<Bytes> for Vec<u8> {
    fn from(bytes: Bytes) -> Vec<u8> {
        let bytes = mem::ManuallyDrop::new(bytes);
        unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
    }
}
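For reference, a minimal usage sketch of the new conversion (illustrative, not part of the diff); both `Vec::<u8>::from(bytes)` and `bytes.into()` dispatch through the vtable's `to_vec`:

use bytes::Bytes;

fn main() {
    let bytes = Bytes::from(vec![1u8, 2, 3]);

    // Consumes the Bytes: reclaims the original allocation when uniquely
    // owned, otherwise falls back to copying.
    let vec: Vec<u8> = bytes.into();
    assert_eq!(vec, [1, 2, 3]);
}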

// ===== impl Vtable =====

impl fmt::Debug for Vtable {
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {

const STATIC_VTABLE: Vtable = Vtable {
    clone: static_clone,
    to_vec: static_to_vec,
    drop: static_drop,
};

@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
    Bytes::from_static(slice)
}

unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    let slice = slice::from_raw_parts(ptr, len);
    slice.to_vec()
}
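As an illustrative aside (not part of the diff): a `Bytes` backed by `&'static [u8]` only borrows its data, so this path always allocates and copies:

use bytes::Bytes;

fn main() {
    static DATA: &[u8] = b"immutable";
    let b = Bytes::from_static(DATA);

    // The static buffer can never be reclaimed, so to_vec copies it.
    let v: Vec<u8> = b.into();
    assert_eq!(v, DATA);
}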

unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
    // nothing to drop for &'static [u8]
}
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {

static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
    clone: promotable_even_clone,
    to_vec: promotable_even_to_vec,
    drop: promotable_even_drop,
};

static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
    clone: promotable_odd_clone,
    to_vec: promotable_odd_to_vec,
    drop: promotable_odd_drop,
};

@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
    }
}

unsafe fn promotable_to_vec(
    data: &AtomicPtr<()>,
    ptr: *const u8,
    len: usize,
    f: fn(*mut ()) -> *mut u8,
) -> Vec<u8> {
    let shared = data.load(Ordering::Acquire);
    let kind = shared as usize & KIND_MASK;

    if kind == KIND_ARC {
        shared_to_vec_impl(shared.cast(), ptr, len)
    } else {
        // If Bytes holds a Vec, then the offset must be 0.
        debug_assert_eq!(kind, KIND_VEC);

        let buf = f(shared);

        let cap = (ptr as usize - buf as usize) + len;

        // Copy back buffer
        ptr::copy(ptr, buf, len);

        Vec::from_raw_parts(buf, len, cap)
    }
}
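The `cap` computation handles a view that starts partway into its allocation: `buf` is the start of the original allocation and `ptr` the start of the view, so `(ptr - buf) + len` is the capacity reachable from `buf`. An illustrative sketch of the observable behavior (assuming `advance` from the `Buf` trait):

use bytes::{Buf, Bytes};

fn main() {
    let mut b = Bytes::from(vec![1u8, 2, 3, 4, 5]); // uniquely owned, KIND_VEC
    b.advance(2); // the view now starts 2 bytes into the allocation

    // to_vec copies the remaining bytes back to the front of the original
    // allocation and rebuilds the Vec there, without a new allocation.
    let v: Vec<u8> = b.into();
    assert_eq!(v, [3, 4, 5]);
}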

unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    promotable_to_vec(data, ptr, len, |shared| {
        ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
    })
}
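Here `ptr_map(shared.cast(), |addr| addr & !KIND_MASK)` strips the kind tag stored in the pointer's low bit to recover the even-aligned buffer address; the odd variant below uses its pointer as-is. A sketch of the tagging arithmetic, assuming the constants defined in this file (`KIND_MASK == 0b1`, `KIND_VEC == 0b1`):

fn main() {
    const KIND_MASK: usize = 0b1; // assumed values, mirroring src/bytes.rs
    const KIND_VEC: usize = 0b1;

    let buf_addr: usize = 0x1000; // an even (2-aligned) allocation address
    let tagged = buf_addr | KIND_VEC; // the low bit carries the kind

    assert_eq!(tagged & KIND_MASK, KIND_VEC); // read the kind back
    assert_eq!(tagged & !KIND_MASK, buf_addr); // mask recovers the address
}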

unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
    data.with_mut(|shared| {
        let shared = *shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
    }
}

unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    promotable_to_vec(data, ptr, len, |shared| shared.cast())
}

unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
    data.with_mut(|shared| {
        let shared = *shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm

static SHARED_VTABLE: Vtable = Vtable {
    clone: shared_clone,
    to_vec: shared_to_vec,
    drop: shared_drop,
};

@@ -979,6 +1035,39 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
    shallow_clone_arc(shared as _, ptr, len)
}

unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
    // Check that the ref_cnt is 1 (unique).
    //
    // If it is unique, then it is set to 0 with an AcqRel fence for the
    // same reason as in release_shared.
    //
    // Otherwise, we take the other branch and call release_shared.
    if (*shared)
        .ref_cnt
        .compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
        .is_ok()
    {
        let buf = (*shared).buf;
        let cap = (*shared).cap;

        // Deallocate Shared
        drop(Box::from_raw(shared as *mut mem::ManuallyDrop<Shared>));

        // Copy back buffer
        ptr::copy(ptr, buf, len);

        Vec::from_raw_parts(buf, len, cap)
    } else {
        let v = slice::from_raw_parts(ptr, len).to_vec();
        release_shared(shared);
        v
    }
}
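In effect (an illustrative sketch, not part of the diff): the last live handle reclaims the shared buffer in place, while any earlier conversion takes the copying branch and releases its reference:

use bytes::Bytes;

fn main() {
    let b1 = Bytes::from(vec![9u8; 64]);
    let b2 = b1.clone(); // promotes to KIND_ARC; ref_cnt == 2

    // b1 is not the sole owner, so this copies and releases its reference.
    let v1: Vec<u8> = b1.into();

    // b2 is now unique, so this reclaims the original buffer.
    let v2: Vec<u8> = b2.into();
    assert_eq!(v1, v2);
}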

unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
}

unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
    data.with_mut(|shared| {
        release_shared(shared.cast());
src/bytes_mut.rs: 23 additions & 0 deletions
@@ -1611,6 +1611,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize)

static SHARED_VTABLE: Vtable = Vtable {
    clone: shared_v_clone,
    to_vec: shared_v_to_vec,
    drop: shared_v_drop,
};

@@ -1622,6 +1623,28 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By
    Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
}

unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    let shared: *mut Shared = data.load(Ordering::Relaxed).cast();

    if (*shared).is_unique() {
        let shared = &mut *shared;

        // Drop shared
        let mut vec = mem::replace(&mut shared.vec, Vec::new());
        release_shared(shared);

        // Copy back buffer
        ptr::copy(ptr, vec.as_mut_ptr(), len);
        vec.set_len(len);

        vec
    } else {
        let v = slice::from_raw_parts(ptr, len).to_vec();
        release_shared(shared);
        v
    }
}
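A sketch of exercising this path from the public API (illustrative; it mirrors the test added below): promoting the `BytesMut` to `KIND_ARC` before `freeze` makes the resulting `Bytes` carry this vtable:

use bytes::BytesMut;

fn main() {
    let mut buf = BytesMut::from(&b"hello world"[..]);
    drop(buf.split_off(5)); // forces KIND_ARC

    let frozen = buf.freeze(); // Bytes now uses bytes_mut's SHARED_VTABLE

    // frozen is unique, so the original Vec is reused (is_unique() branch).
    let v: Vec<u8> = frozen.into();
    assert_eq!(v, b"hello");
}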

unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
    data.with_mut(|shared| {
        release_shared(*shared as *mut Shared);
tests/test_bytes.rs: 70 additions & 0 deletions
@@ -1065,3 +1065,73 @@ fn bytes_into_vec() {
    let vec: Vec<u8> = bytes.into();
    assert_eq!(&vec, prefix);
}

#[test]
fn test_bytes_into_vec() {
    // Test STATIC_VTABLE.to_vec
    let bs = b"1b23exfcz3r";
    let vec: Vec<u8> = Bytes::from_static(bs).into();
    assert_eq!(&*vec, bs);

    // Test the bytes_mut SHARED_VTABLE.to_vec impl
    let mut bytes_mut: BytesMut = bs[..].into();

    // Set kind to KIND_ARC so that, after freeze, Bytes will use
    // bytes_mut's SHARED_VTABLE.
    drop(bytes_mut.split_off(bs.len()));

    let b1 = bytes_mut.freeze();
    let b2 = b1.clone();

    // shared.is_unique() == false
    assert_eq!(&*Vec::from(b2), bs);

    // shared.is_unique() == true
    assert_eq!(&*Vec::from(b1), bs);

    // Test the bytes_mut SHARED_VTABLE.to_vec impl where offset != 0
    let mut bytes_mut1: BytesMut = bs[..].into();
    let bytes_mut2 = bytes_mut1.split_off(9);

    let b1 = bytes_mut1.freeze();
    let b2 = bytes_mut2.freeze();

    assert_eq!(Vec::from(b2), bs[9..]);
    assert_eq!(Vec::from(b1), bs[..9]);
}

#[test]
fn test_bytes_into_vec_promotable_even() {
    let vec = vec![33u8; 1024];

    // Test case where kind == KIND_VEC
    let b1 = Bytes::from(vec.clone());
    assert_eq!(Vec::from(b1), vec);

    // Test case where kind == KIND_ARC, ref_cnt == 1
    let b1 = Bytes::from(vec.clone());
    drop(b1.clone());
    assert_eq!(Vec::from(b1), vec);

    // Test case where kind == KIND_ARC, ref_cnt == 2
    let b1 = Bytes::from(vec.clone());
    let b2 = b1.clone();
    assert_eq!(Vec::from(b1), vec);

    // Test case where vtable == SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
    assert_eq!(Vec::from(b2), vec);

    // Test case where offset != 0
    let mut b1 = Bytes::from(vec.clone());
    let b2 = b1.split_off(20);

    assert_eq!(Vec::from(b2), vec[20..]);
    assert_eq!(Vec::from(b1), vec[..20]);
}
tests/test_bytes_odd_alloc.rs: 29 additions & 0 deletions
@@ -66,3 +66,32 @@ fn test_bytes_clone_drop() {
    let b1 = Bytes::from(vec);
    let _b2 = b1.clone();
}

#[test]
fn test_bytes_into_vec() {
    let vec = vec![33u8; 1024];

    // Test case where kind == KIND_VEC
    let b1 = Bytes::from(vec.clone());
    assert_eq!(Vec::from(b1), vec);

    // Test case where kind == KIND_ARC, ref_cnt == 1
    let b1 = Bytes::from(vec.clone());
    drop(b1.clone());
    assert_eq!(Vec::from(b1), vec);

    // Test case where kind == KIND_ARC, ref_cnt == 2
    let b1 = Bytes::from(vec.clone());
    let b2 = b1.clone();
    assert_eq!(Vec::from(b1), vec);

    // Test case where vtable == SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
    assert_eq!(Vec::from(b2), vec);

    // Test case where offset != 0
    let mut b1 = Bytes::from(vec.clone());
    let b2 = b1.split_off(20);

    assert_eq!(Vec::from(b2), vec[20..]);
    assert_eq!(Vec::from(b1), vec[..20]);
}
tests/test_bytes_vec_alloc.rs: 29 additions & 0 deletions
@@ -112,3 +112,32 @@ fn invalid_ptr<T>(addr: usize) -> *mut T {
    debug_assert_eq!(ptr as usize, addr);
    ptr.cast::<T>()
}

#[test]
fn test_bytes_into_vec() {
    let vec = vec![33u8; 1024];

    // Test case where kind == KIND_VEC
    let b1 = Bytes::from(vec.clone());
    assert_eq!(Vec::from(b1), vec);

    // Test case where kind == KIND_ARC, ref_cnt == 1
    let b1 = Bytes::from(vec.clone());
    drop(b1.clone());
    assert_eq!(Vec::from(b1), vec);

    // Test case where kind == KIND_ARC, ref_cnt == 2
    let b1 = Bytes::from(vec.clone());
    let b2 = b1.clone();
    assert_eq!(Vec::from(b1), vec);

    // Test case where vtable == SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
    assert_eq!(Vec::from(b2), vec);

    // Test case where offset != 0
    let mut b1 = Bytes::from(vec.clone());
    let b2 = b1.split_off(20);

    assert_eq!(Vec::from(b2), vec[20..]);
    assert_eq!(Vec::from(b1), vec[..20]);
}
