Skip to content

Commit

Permalink
Merge pull request #32 from google/unsafe_doc
Browse files Browse the repository at this point in the history
Require safety comments.
  • Loading branch information
qwandor authored Jul 19, 2023
2 parents a9f34cb + 46f69fc commit ba617f6
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 20 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,11 @@ jobs:
uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Run clippy for aarch64
uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --target=aarch64-unknown-none

format:
runs-on: ubuntu-latest
Expand Down
15 changes: 8 additions & 7 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
//! ```

#![no_std]
#![deny(clippy::undocumented_unsafe_blocks)]

#[cfg(feature = "alloc")]
pub mod idmap;
Expand Down Expand Up @@ -125,10 +126,10 @@ impl<T: Translation + Clone> Mapping<T> {
assert!(self.previous_ttbr.is_none());

let mut previous_ttbr;
// SAFETY: Safe because we trust that self.root.to_physical() returns a valid physical
// address of a page table, and the `Drop` implementation will reset `TTBRn_EL1` before it
// becomes invalid.
unsafe {
// Safe because we trust that self.root.to_physical() returns a valid physical address
// of a page table, and the `Drop` implementation will reset `TTBRn_EL1` before it
// becomes invalid.
match self.root.va_range() {
VaRange::Lower => asm!(
"mrs {previous_ttbr}, ttbr0_el1",
Expand Down Expand Up @@ -159,9 +160,9 @@ impl<T: Translation + Clone> Mapping<T> {
/// called.
#[cfg(target_arch = "aarch64")]
pub fn deactivate(&mut self) {
// SAFETY: Safe because this just restores the previously saved value of `TTBRn_EL1`, which
// must have been valid.
unsafe {
// Safe because this just restores the previously saved value of `TTBRn_EL1`, which must
// have been valid.
match self.root.va_range() {
VaRange::Lower => asm!(
"msr ttbr0_el1, {ttbrval}",
Expand Down Expand Up @@ -211,8 +212,8 @@ impl<T: Translation + Clone> Mapping<T> {
) -> Result<(), MapError> {
self.root.map_range(range, pa, flags)?;
#[cfg(target_arch = "aarch64")]
// SAFETY: Safe because this is just a memory barrier.
unsafe {
// Safe because this is just a memory barrier.
asm!("dsb ishst");
}
Ok(())
Expand All @@ -239,8 +240,8 @@ impl<T: Translation + Clone> Mapping<T> {
pub fn modify_range(&mut self, range: &MemoryRegion, f: &PteUpdater) -> Result<(), MapError> {
self.root.modify_range(range, f)?;
#[cfg(target_arch = "aarch64")]
// SAFETY: Safe because this is just a memory barrier.
unsafe {
// Safe because this is just a memory barrier.
asm!("dsb ishst");
}
Ok(())
Expand Down
26 changes: 13 additions & 13 deletions src/paging.rs
Original file line number Diff line number Diff line change
Expand Up @@ -455,9 +455,9 @@ impl<T: Translation> PageTableWithLevel<T> {
fn get_entry(&self, va: VirtualAddress) -> &Descriptor {
let shift = PAGE_SHIFT + (LEAF_LEVEL - self.level) * BITS_PER_LEVEL;
let index = (va.0 >> shift) % (1 << BITS_PER_LEVEL);
// Safe because we know that the pointer is properly aligned, dereferenced and initialised,
// and nothing else can access the page table while we hold a mutable reference to the
// PageTableWithLevel (assuming it is not currently active).
// SAFETY: Safe because we know that the pointer is properly aligned, dereferenceable and
// initialised, and nothing else can access the page table while we hold a mutable reference
// to the PageTableWithLevel (assuming it is not currently active).
let table = unsafe { self.table.as_ref() };
&table.entries[index]
}
Expand All @@ -466,9 +466,9 @@ impl<T: Translation> PageTableWithLevel<T> {
fn get_entry_mut(&mut self, va: VirtualAddress) -> &mut Descriptor {
let shift = PAGE_SHIFT + (LEAF_LEVEL - self.level) * BITS_PER_LEVEL;
let index = (va.0 >> shift) % (1 << BITS_PER_LEVEL);
// Safe because we know that the pointer is properly aligned, dereferenced and initialised,
// and nothing else can access the page table while we hold a mutable reference to the
// PageTableWithLevel (assuming it is not currently active).
// SAFETY: Safe because we know that the pointer is properly aligned, dereferenceable and
// initialised, and nothing else can access the page table while we hold a mutable reference
// to the PageTableWithLevel (assuming it is not currently active).
let table = unsafe { self.table.as_mut() };
&mut table.entries[index]
}
Expand Down Expand Up @@ -543,8 +543,8 @@ impl<T: Translation> PageTableWithLevel<T> {
indentation: usize,
) -> Result<(), fmt::Error> {
const WIDTH: usize = 3;
// Safe because we know that the pointer is aligned, initialised and dereferencable, and the
// PageTable won't be mutated while we are using it.
// SAFETY: Safe because we know that the pointer is aligned, initialised and dereferenceable,
// and the PageTable won't be mutated while we are using it.
let table = unsafe { self.table.as_ref() };

let mut i = 0;
Expand Down Expand Up @@ -577,8 +577,8 @@ impl<T: Translation> PageTableWithLevel<T> {
/// Frees the memory used by this pagetable and all subtables. It is not valid to access the
/// page table after this.
fn free(&mut self, translation: &T) {
// Safe because we know that the pointer is aligned, initialised and dereferencable, and the
// PageTable won't be mutated while we are freeing it.
// SAFETY: Safe because we know that the pointer is aligned, initialised and dereferenceable,
// and the PageTable won't be mutated while we are freeing it.
let table = unsafe { self.table.as_ref() };
for entry in table.entries {
if let Some(mut subtable) = entry.subtable(translation, self.level) {
Expand All @@ -587,7 +587,7 @@ impl<T: Translation> PageTableWithLevel<T> {
subtable.free(translation);
}
}
// Safe because the table was allocated by `PageTableWithLevel::new` with the global
// SAFETY: Safe because the table was allocated by `PageTableWithLevel::new` with the global
// allocator and appropriate layout.
unsafe {
// Actually free the memory used by the `PageTable`.
Expand Down Expand Up @@ -647,8 +647,8 @@ impl PageTable {
/// allocator and returns a pointer to it.
#[cfg(feature = "alloc")]
pub fn new() -> NonNull<Self> {
// Safe because the pointer has been allocated with the appropriate layout by the global
// allocator, and the memory is zeroed which is valid initialisation for a PageTable.
// SAFETY: Safe because the pointer has been allocated with the appropriate layout by the
// global allocator, and the memory is zeroed which is valid initialisation for a PageTable.
unsafe { allocate_zeroed() }
}
}
Expand Down

0 comments on commit ba617f6

Please sign in to comment.