Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

inflate: some safety documentation #244

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions zlib-rs/src/inflate/bitreader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ impl<'a> BitReader<'a> {
}
}

/// # Safety
///
/// ptr and len must satisfy the requirements of [`core::slice::from_raw_parts`].
#[inline(always)]
pub unsafe fn update_slice(&mut self, ptr: *const u8, len: usize) {
let end = ptr.wrapping_add(len);
Expand Down Expand Up @@ -55,6 +58,7 @@ impl<'a> BitReader<'a> {
/// Returns the bytes not yet consumed by the reader as a shared slice.
#[inline(always)]
pub fn as_slice(&self) -> &[u8] {
let len = self.bytes_remaining();
// SAFETY: condition of constructing this struct. `self.ptr` plus the remaining
// length must stay a valid readable region; this invariant is established at
// construction and maintained by the other methods of this impl.
// NOTE(review): the maintaining code is outside this hunk — confirm `ptr`/`end`
// are only ever advanced within the original slice bounds.
unsafe { core::slice::from_raw_parts(self.ptr, len) }
}

Expand Down Expand Up @@ -91,10 +95,10 @@ impl<'a> BitReader<'a> {

#[inline(always)]
pub fn pull_byte(&mut self) -> Result<u8, ReturnCode> {
// SAFETY: bounds checking.
if self.ptr == self.end {
return Err(ReturnCode::Ok);
}

let byte = unsafe { *self.ptr };
self.ptr = unsafe { self.ptr.add(1) };

Expand All @@ -108,6 +112,7 @@ impl<'a> BitReader<'a> {
pub fn refill(&mut self) {
debug_assert!(self.bytes_remaining() >= 8);

// SAFETY: assertion above ensures we have 8 bytes to read for a u64.
let read = unsafe { core::ptr::read_unaligned(self.ptr.cast::<u64>()) }.to_le();
self.bit_buffer |= read << self.bits_used;
let increment = (63 - self.bits_used) >> 3;
Expand Down Expand Up @@ -163,6 +168,8 @@ impl<'a> BitReader<'a> {
#[inline(always)]
pub fn return_unused_bytes(&mut self) {
let len = self.bits_used >> 3;
// SAFETY: ptr is advanced whenever bits_used is incremented by 8, so this sub is always
// in bounds.
self.ptr = unsafe { self.ptr.sub(len as usize) };
self.bits_used -= len << 3;
self.bit_buffer &= (1u64 << self.bits_used) - 1u64;
Expand All @@ -178,7 +185,7 @@ impl std::io::Read for BitReader<'_> {

let number_of_bytes = Ord::min(buf.len(), self.bytes_remaining());

// safety: `buf` is a mutable (exclusive) reference, so it cannot overlap the memory that
// SAFETY: `buf` is a mutable (exclusive) reference, so it cannot overlap the memory that
// the reader contains
unsafe { core::ptr::copy_nonoverlapping(self.ptr, buf.as_mut_ptr(), number_of_bytes) }

Expand Down
19 changes: 16 additions & 3 deletions zlib-rs/src/inflate/writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ impl<'a> Writer<'a> {
/// Returns a shared reference to the filled portion of the buffer.
#[inline]
pub fn filled(&self) -> &[u8] {
// SAFETY: the filled area of the buffer is always initialized, and self.filled is always
// in-bounds.
// NOTE(review): the cast presumably reinterprets `MaybeUninit<u8>` storage as `u8`,
// which is sound only for the initialized prefix `..self.filled` — confirm the
// `buf` field type and that `filled` never exceeds the initialized length.
unsafe { core::slice::from_raw_parts(self.buf.as_ptr().cast(), self.filled) }
}

Expand Down Expand Up @@ -112,9 +114,11 @@ impl<'a> Writer<'a> {
let len = range.end - range.start;

if self.remaining() >= len + core::mem::size_of::<C>() {
// Safety: we know that our window has at least a core::mem::size_of::<C>() extra bytes
// SAFETY: we know that our window has at least a core::mem::size_of::<C>() extra bytes
// at the end, making it always safe to perform an (unaligned) Chunk read anywhere in
// the window slice.
//
// The calling function checks for CPU features requirements for C.
unsafe {
let src = window.as_ptr();
Self::copy_chunk_unchecked::<C>(
Expand Down Expand Up @@ -210,6 +214,7 @@ impl<'a> Writer<'a> {

if current + length + core::mem::size_of::<C>() < capacity {
let ptr = buf.as_mut_ptr();
// SAFETY: if statement and checked_sub ensures we stay in bounds.
unsafe { Self::copy_chunk_unchecked::<C>(ptr.add(start), ptr.add(current), length) }
} else {
// a full simd copy does not fit in the output buffer
Expand Down Expand Up @@ -259,10 +264,18 @@ fn slice_to_uninit(slice: &[u8]) -> &[MaybeUninit<u8>] {
}

trait Chunk {
/// Safety: must be valid to read a `Self::Chunk` value from `from` with an unaligned read.
/// # Safety
///
/// Must be valid to read a `Self::Chunk` value from `from` with an unaligned read.
///
/// Implementations may have CPU feature specific requirements depending on the type.
unsafe fn load_chunk(from: *const MaybeUninit<u8>) -> Self;

/// Safety: must be valid to write a `Self::Chunk` value to `out` with an unaligned write.
/// # Safety
///
/// Must be valid to write a `Self::Chunk` value to `out` with an unaligned write.
///
/// Implementations may have CPU feature specific requirements depending on the type.
unsafe fn store_chunk(out: *mut MaybeUninit<u8>, chunk: Self);
}

Expand Down
Loading