Remove NULL ObjectReference
wks committed Apr 24, 2024
1 parent 063f8fc commit f89caea
Showing 8 changed files with 117 additions and 73 deletions.
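This commit follows the mmtk-core change that removes NULL `ObjectReference`s: `Edge::load` and `ReferenceGlue::get_referent` now return `Option<ObjectReference>`, and raw addresses become references either through the checked `ObjectReference::from_raw_address` (which yields `None` for a zero address in this mmtk-core revision) or through the unsafe `from_raw_address_unchecked` where non-zero is already guaranteed. The sketch below is a minimal stand-alone model of that pattern; `Address`, `ObjectReference`, and `Slot` here are stand-ins, not the real `mmtk::util` types.

```rust
// Stand-ins for mmtk's Address/ObjectReference, only to illustrate the
// "no NULL ObjectReference" pattern this commit adopts.
use std::num::NonZeroUsize;

#[derive(Clone, Copy, Debug)]
struct Address(usize);

#[derive(Clone, Copy, Debug)]
struct ObjectReference(NonZeroUsize);

impl ObjectReference {
    // Checked conversion: a zero (NULL) address yields None.
    fn from_raw_address(addr: Address) -> Option<ObjectReference> {
        NonZeroUsize::new(addr.0).map(ObjectReference)
    }

    // Unchecked conversion: the caller guarantees the address is non-zero.
    unsafe fn from_raw_address_unchecked(addr: Address) -> ObjectReference {
        debug_assert!(addr.0 != 0);
        ObjectReference(unsafe { NonZeroUsize::new_unchecked(addr.0) })
    }
}

// A slot (edge) now loads Option<ObjectReference>: an empty slot is None,
// so downstream code never sees a NULL reference.
struct Slot(*const usize);

impl Slot {
    fn load(&self) -> Option<ObjectReference> {
        let raw = unsafe { *self.0 };
        ObjectReference::from_raw_address(Address(raw))
    }
}

fn main() {
    let empty: usize = 0;
    let filled: usize = 0x1_0000;
    assert!(Slot(&empty as *const usize).load().is_none());
    assert!(Slot(&filled as *const usize).load().is_some());

    // Where non-zero is already guaranteed, the unchecked constructor applies.
    let _obj = unsafe { ObjectReference::from_raw_address_unchecked(Address(filled)) };
}
```

The rest of the diff is essentially this pattern applied at each call site: check or assert non-zero, then construct, and let empty slots flow through as `None`.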
17 changes: 9 additions & 8 deletions mmtk/Cargo.lock


2 changes: 1 addition & 1 deletion mmtk/Cargo.toml
@@ -31,7 +31,7 @@ lazy_static = "1.1"
# - change branch
# - change repo name
# But other changes including adding/removing whitespaces in commented lines may break the CI
mmtk = { git = "https://github.com/mmtk/mmtk-core.git", rev="e79e94e744660c486d5471f252ff05c4248bcea9" }
mmtk = { git = "https://github.com/mmtk/mmtk-core.git", rev="990980858be221db5572e0e362b986e5294dc025" }
# Uncomment the following to build locally
# mmtk = { path = "../repos/mmtk-core" }
log = {version = "0.4", features = ["max_level_trace", "release_max_level_off"] }
5 changes: 3 additions & 2 deletions mmtk/src/edges.rs
@@ -17,7 +17,7 @@ pub enum JuliaVMEdge {
unsafe impl Send for JuliaVMEdge {}

impl Edge for JuliaVMEdge {
fn load(&self) -> ObjectReference {
fn load(&self) -> Option<ObjectReference> {
match self {
JuliaVMEdge::Simple(e) => e.load(),
JuliaVMEdge::Offset(e) => e.load(),
@@ -74,9 +74,10 @@ impl OffsetEdge {
}

impl Edge for OffsetEdge {
fn load(&self) -> ObjectReference {
fn load(&self) -> Option<ObjectReference> {
let middle = unsafe { (*self.slot_addr).load(atomic::Ordering::Relaxed) };
let begin = middle - self.offset;
debug_assert!(!begin.is_zero());
ObjectReference::from_raw_address(begin)
}

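`OffsetEdge` handles Julia slots that store an interior pointer `offset` bytes past the object start, so `load` subtracts the offset before building the reference (and, after this commit, asserts the result is non-zero). A rough, self-contained sketch of that arithmetic, with plain `usize` values standing in for mmtk addresses:

```rust
// Sketch of the OffsetEdge idea: the slot stores an interior pointer
// `middle = begin + offset`; loading subtracts the offset to recover the
// object start, and an empty slot maps to None rather than a NULL reference.
fn load_offset_slot(slot: &usize, offset: usize) -> Option<usize> {
    let middle = *slot;
    if middle == 0 {
        return None; // empty slot: no object behind this edge
    }
    Some(middle - offset) // recover the object start
}

fn main() {
    let object_start = 0x2000usize;
    let offset = 16usize;
    let slot = object_start + offset; // the slot stores an interior pointer
    assert_eq!(load_offset_slot(&slot, offset), Some(object_start));
    assert_eq!(load_offset_slot(&0usize, offset), None);
}
```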
7 changes: 5 additions & 2 deletions mmtk/src/julia_finalizer.rs
@@ -111,7 +111,9 @@ impl ArrayListT {
}

fn gc_ptr_clear_tag(addr: Address, tag: usize) -> ObjectReference {
ObjectReference::from_raw_address(unsafe { Address::from_usize(addr & !tag) })
let addr = unsafe { Address::from_usize(addr & !tag) };
debug_assert!(!addr.is_zero());
unsafe { ObjectReference::from_raw_address_unchecked(addr) }
}

fn gc_ptr_tag(addr: Address, tag: usize) -> bool {
@@ -201,7 +203,8 @@ fn mark_finlist<T: ObjectTracer>(list: &mut ArrayListT, start: usize, tracer: &m
cur_tag = 1;
gc_ptr_clear_tag(cur, 1)
} else {
ObjectReference::from_raw_address(cur)
// unsafe: We checked `cur.is_zero()` before.
unsafe { ObjectReference::from_raw_address_unchecked(cur) }
};
if gc_ptr_tag(cur, 2) {
i += 1;
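The finalizer list packs tag bits into the low bits of each stored pointer, which is why `gc_ptr_clear_tag` masks them off (and now asserts that the untagged address is non-zero) before the unchecked conversion. A small self-contained sketch of that tagging scheme; the tag values 1 and 2 follow the code above, everything else is illustrative:

```rust
// Pointer tagging as used by the finalizer list: the low bits of a stored
// address double as flags, so they must be masked off before the address is
// treated as an object pointer.
fn gc_ptr_tag(addr: usize, tag: usize) -> bool {
    addr & tag != 0
}

fn gc_ptr_clear_tag(addr: usize, tag: usize) -> usize {
    let cleared = addr & !tag;
    debug_assert!(cleared != 0, "clearing the tag must not yield address 0");
    cleared
}

fn main() {
    let object = 0x4000usize; // 16-byte aligned, so the low bits are free for tags
    let tagged = object | 1;
    assert!(gc_ptr_tag(tagged, 1));
    assert!(!gc_ptr_tag(tagged, 2));
    assert_eq!(gc_ptr_clear_tag(tagged, 1), object);
}
```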
66 changes: 40 additions & 26 deletions mmtk/src/julia_scanning.rs
@@ -4,7 +4,6 @@ use crate::edges::JuliaVMEdge;
use crate::edges::OffsetEdge;
use crate::julia_types::*;
use crate::object_model::mmtk_jl_array_ndims;
use crate::JuliaVM;
use crate::JULIA_BUFF_TAG;
use crate::UPCALLS;
use memoffset::offset_of;
@@ -416,27 +415,34 @@ fn get_stack_addr(addr: Address, offset: isize, lb: u64, ub: u64) -> Address {
}
}

use mmtk::vm::edge_shape::Edge;

#[inline(always)]
pub fn process_edge<EV: EdgeVisitor<JuliaVMEdge>>(closure: &mut EV, slot: Address) {
let simple_edge = SimpleEdge::from_address(slot);
debug_assert!(
simple_edge.load().is_null()
|| mmtk::memory_manager::is_in_mmtk_spaces::<JuliaVM>(simple_edge.load()),
"Object {:?} in slot {:?} is not mapped address",
simple_edge.load(),
simple_edge
);

// captures wrong edges before creating the work
debug_assert!(
simple_edge.load().to_raw_address().as_usize() % 16 == 0
|| simple_edge.load().to_raw_address().as_usize() % 8 == 0,
"Object {:?} in slot {:?} is not aligned to 8 or 16",
simple_edge.load(),
simple_edge
);
#[cfg(debug_assertions)]
{
use crate::JuliaVM;
use mmtk::vm::edge_shape::Edge;

if let Some(objref) = simple_edge.load() {
debug_assert!(
mmtk::memory_manager::is_in_mmtk_spaces::<JuliaVM>(objref),
"Object {:?} in slot {:?} is not mapped address",
objref,
simple_edge
);

let raw_addr_usize = objref.to_raw_address().as_usize();

// captures wrong edges before creating the work
debug_assert!(
raw_addr_usize % 16 == 0 || raw_addr_usize % 8 == 0,
"Object {:?} in slot {:?} is not aligned to 8 or 16",
objref,
simple_edge
);
}
}

closure.visit_edge(JuliaVMEdge::Simple(simple_edge));
}
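`process_edge` now wraps its sanity checks in a `#[cfg(debug_assertions)]` block, so the extra `load` calls and the `JuliaVM`/`Edge` imports exist only in debug builds, and the checks run only when the slot actually holds an object. The same pattern in isolation; `process_slot` below is illustrative, not the real mmtk-julia function:

```rust
// Expensive sanity checks compiled only when debug_assertions is on; the
// items they need are scoped inside the cfg block.
fn process_slot(slot_value: usize) {
    #[cfg(debug_assertions)]
    {
        // Only validate when the slot holds an object (non-zero).
        if let Some(obj) = std::num::NonZeroUsize::new(slot_value) {
            debug_assert!(
                obj.get() % 8 == 0,
                "Object {:#x} in slot is not aligned to 8",
                obj.get()
            );
        }
    }

    // In the real code, the edge is then packaged and passed to the visitor.
    let _ = slot_value;
}

fn main() {
    process_slot(0);      // empty slot: nothing to validate
    process_slot(0x8000); // aligned object address passes the debug check
}
```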
@@ -485,13 +491,20 @@ pub fn process_offset_edge<EV: EdgeVisitor<JuliaVMEdge>>(
offset: usize,
) {
let offset_edge = OffsetEdge::new_with_offset(slot, offset);
debug_assert!(
offset_edge.load().is_null()
|| mmtk::memory_manager::is_in_mmtk_spaces::<JuliaVM>(offset_edge.load()),
"Object {:?} in slot {:?} is not mapped address",
offset_edge.load(),
offset_edge
);
#[cfg(debug_assertions)]
{
use crate::JuliaVM;
use mmtk::vm::edge_shape::Edge;

if let Some(objref) = offset_edge.load() {
debug_assert!(
mmtk::memory_manager::is_in_mmtk_spaces::<JuliaVM>(objref),
"Object {:?} in slot {:?} is not mapped address",
objref,
offset_edge
);
}
}

closure.visit_edge(JuliaVMEdge::Offset(offset_edge));
}
@@ -603,5 +616,6 @@ pub fn mmtk_jl_bt_num_uintvals(bt_entry: *mut mmtk_jl_bt_element_t) -> usize {

pub fn mmtk_jl_bt_entry_jlvalue(bt_entry: *mut mmtk_jl_bt_element_t, i: usize) -> ObjectReference {
let entry = unsafe { (*bt_entry.add(2 + i)).__bindgen_anon_1.jlvalue };
ObjectReference::from_raw_address(Address::from_mut_ptr(entry))
debug_assert!(!entry.is_null());
unsafe { ObjectReference::from_raw_address_unchecked(Address::from_mut_ptr(entry)) }
}
21 changes: 13 additions & 8 deletions mmtk/src/object_model.rs
@@ -62,25 +62,28 @@ impl ObjectModel<JuliaVM> for VMObjectModel {
copy_context: &mut GCWorkerCopyContext<JuliaVM>,
) -> ObjectReference {
let bytes = Self::get_current_size(from);
let from_start_ref = ObjectReference::from_raw_address(Self::ref_to_object_start(from));
let header_offset =
from.to_raw_address().as_usize() - from_start_ref.to_raw_address().as_usize();
let from_addr = from.to_raw_address();
let from_start = Self::ref_to_object_start(from);
let header_offset = from_addr - from_start;

let dst = if header_offset == 8 {
// regular object
copy_context.alloc_copy(from_start_ref, bytes, 16, 8, semantics)
// Note: The `from` reference is not used by any allocator currently in MMTk core.
copy_context.alloc_copy(from, bytes, 16, 8, semantics)
} else if header_offset == 16 {
// buffer should not be copied
unimplemented!();
} else {
unimplemented!()
};
// `alloc_copy` should never return zero.
debug_assert!(!dst.is_zero());

let src = Self::ref_to_object_start(from);
let src = from_start;
unsafe {
std::ptr::copy_nonoverlapping::<u8>(src.to_ptr(), dst.to_mut_ptr(), bytes);
}
let to_obj = ObjectReference::from_raw_address(dst + header_offset);
let to_obj = unsafe { ObjectReference::from_raw_address_unchecked(dst + header_offset) };

trace!("Copying object from {} to {}", from, to_obj);

@@ -99,7 +102,7 @@ impl ObjectModel<JuliaVM> for VMObjectModel
{
use atomic::Ordering;
unsafe {
libc::memset(from_start_ref.to_raw_address().to_mut_ptr(), 0, bytes);
libc::memset(from_start.to_mut_ptr(), 0, bytes);
}

Self::LOCAL_FORWARDING_BITS_SPEC.store_atomic::<JuliaVM, u8>(
@@ -160,7 +163,9 @@ impl ObjectModel<JuliaVM> for VMObjectModel {

#[inline(always)]
fn address_to_ref(address: Address) -> ObjectReference {
ObjectReference::from_raw_address(address)
// `address` is a result of `ref_to_address(object)`, where `object` cannot be NULL.
debug_assert!(!address.is_zero());
unsafe { ObjectReference::from_raw_address_unchecked(address) }
}

#[inline(always)]
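In `copy`, the object reference points `header_offset` bytes past the allocation start (8 bytes for a regular Julia object), so after allocating and copying `bytes` bytes the new reference is rebuilt as `dst + header_offset`. A simplified, self-contained model of that bookkeeping, using a byte buffer in place of real heap memory and allocator calls:

```rust
// Simplified model of copying an object whose reference points a fixed
// header_offset past the allocation start: copy the whole allocation, then
// rebuild the reference at the same offset inside the destination.
// (Assumes the source allocation precedes the newly appended destination.)
fn copy_object(heap: &mut Vec<u8>, from_ref: usize, header_offset: usize, bytes: usize) -> usize {
    let from_start = from_ref - header_offset;  // "ref_to_object_start"
    let dst_start = heap.len();                 // "alloc_copy": append fresh space
    heap.resize(dst_start + bytes, 0);
    let (src_half, dst_half) = heap.split_at_mut(dst_start);
    dst_half.copy_from_slice(&src_half[from_start..from_start + bytes]);
    dst_start + header_offset                   // new object reference
}

fn main() {
    // One 24-byte allocation: 8-byte header followed by a 16-byte payload.
    let mut heap: Vec<u8> = (0u8..24).collect();
    let from_ref = 8;                           // the reference skips the header
    let to_ref = copy_object(&mut heap, from_ref, 8, 24);
    assert_eq!(to_ref, 24 + 8);
    assert_eq!(heap[to_ref], heap[from_ref]);   // payload preserved at the same offset
}
```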
52 changes: 33 additions & 19 deletions mmtk/src/reference_glue.rs
@@ -26,40 +26,54 @@ impl Finalizable for JuliaFinalizableObject {
self.set_reference(trace.trace_object(self.get_reference()));
if !self.2 {
// not a void pointer
trace.trace_object(ObjectReference::from_raw_address(self.1));
debug_assert!(!self.1.is_zero());
let objref = unsafe { ObjectReference::from_raw_address_unchecked(self.1) };
trace.trace_object(objref);
}
}
}

pub struct VMReferenceGlue {}

impl ReferenceGlue<JuliaVM> for VMReferenceGlue {
type FinalizableType = JuliaFinalizableObject;
fn set_referent(reference: ObjectReference, referent: ObjectReference) {
impl VMReferenceGlue {
fn load_referent_raw(reference: ObjectReference) -> *mut mmtk_jl_value_t {
let reff = reference.to_raw_address().to_ptr::<mmtk_jl_weakref_t>();
unsafe { (*reff).value }
}

fn set_referent_raw(reference: ObjectReference, referent_raw: *mut mmtk_jl_value_t) {
let reff = reference.to_raw_address().to_mut_ptr::<mmtk_jl_weakref_t>();
unsafe {
let reff = reference.to_raw_address().to_mut_ptr::<mmtk_jl_weakref_t>();
let referent_raw = referent.to_raw_address().to_mut_ptr::<mmtk_jl_value_t>();
(*reff).value = referent_raw;
}
}
}

fn clear_referent(new_reference: ObjectReference) {
Self::set_referent(new_reference, unsafe {
ObjectReference::from_raw_address(Address::from_mut_ptr(jl_nothing))
});
impl ReferenceGlue<JuliaVM> for VMReferenceGlue {
type FinalizableType = JuliaFinalizableObject;

fn set_referent(reference: ObjectReference, referent: ObjectReference) {
Self::set_referent_raw(reference, referent.to_raw_address().to_mut_ptr());
}

fn get_referent(object: ObjectReference) -> ObjectReference {
let referent;
unsafe {
let reff = object.to_raw_address().to_mut_ptr::<mmtk_jl_weakref_t>();
referent = ObjectReference::from_raw_address(Address::from_mut_ptr((*reff).value));
}
referent
fn clear_referent(new_reference: ObjectReference) {
Self::set_referent_raw(new_reference, unsafe { jl_nothing });
}

fn is_referent_cleared(referent: ObjectReference) -> bool {
unsafe { referent.to_raw_address().to_mut_ptr() == jl_nothing }
fn get_referent(object: ObjectReference) -> Option<ObjectReference> {
let value = Self::load_referent_raw(object);
if value == unsafe { jl_nothing } {
return None;
} else {
debug_assert!(
!value.is_null(),
"A weak reference {} contains null referent pointer",
object
);
let objref =
unsafe { ObjectReference::from_raw_address_unchecked(Address::from_ptr(value)) };
Some(objref)
}
}

fn enqueue_references(_references: &[ObjectReference], _tls: VMWorkerThread) {}
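Julia weak references never hold a NULL referent: a cleared reference points at the `jl_nothing` singleton, which is why `get_referent` can map that sentinel to `None` and treat a genuinely null pointer as a bug. A toy model of the convention; the sentinel value below is made up:

```rust
// Toy model of the weak-reference convention used above: the referent field
// is never null; "cleared" is represented by a sentinel (Julia's jl_nothing).
const NOTHING: usize = 0xDEAD_0000; // stand-in for the jl_nothing singleton

struct WeakRef {
    value: usize, // raw referent pointer, never 0
}

impl WeakRef {
    fn clear_referent(&mut self) {
        self.value = NOTHING;
    }

    fn get_referent(&self) -> Option<usize> {
        if self.value == NOTHING {
            None
        } else {
            debug_assert!(self.value != 0, "weak reference contains a null referent");
            Some(self.value)
        }
    }
}

fn main() {
    let mut weak = WeakRef { value: 0x6000 };
    assert_eq!(weak.get_referent(), Some(0x6000));
    weak.clear_referent();
    assert_eq!(weak.get_referent(), None);
}
```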
20 changes: 13 additions & 7 deletions mmtk/src/scanning.rs
@@ -4,6 +4,7 @@ use mmtk::memory_manager;
use mmtk::scheduler::*;
use mmtk::util::opaque_pointer::*;
use mmtk::util::ObjectReference;
use mmtk::vm::edge_shape::Edge;
use mmtk::vm::EdgeVisitor;
use mmtk::vm::ObjectTracerContext;
use mmtk::vm::RootsWorkFactory;
@@ -31,9 +32,7 @@ impl Scanning<JuliaVM> for VMScanning {
fn visit_edge(&mut self, edge: JuliaVMEdge) {
match edge {
JuliaVMEdge::Simple(se) => {
let slot = se.as_address();
let object = unsafe { slot.load::<ObjectReference>() };
if !object.is_null() {
if let Some(object) = se.load() {
self.buffer.push(object);
}
}
@@ -67,7 +66,11 @@ impl Scanning<JuliaVM> for VMScanning {
Address::from_ptr(task)
);

node_buffer.push(ObjectReference::from_raw_address(Address::from_ptr(task)));
// unsafe: We checked `!task.is_null()` before.
let objref = unsafe {
ObjectReference::from_raw_address_unchecked(Address::from_ptr(task))
};
node_buffer.push(objref);
}
}
};
@@ -88,9 +91,12 @@ impl Scanning<JuliaVM> for VMScanning {
root_scan_task(ptls.next_task, true);
root_scan_task(ptls.previous_task, true);
if !ptls.previous_exception.is_null() {
node_buffer.push(ObjectReference::from_raw_address(Address::from_mut_ptr(
ptls.previous_exception,
)));
node_buffer.push(unsafe {
// unsafe: We have just checked `ptls.previous_exception` is not null.
ObjectReference::from_raw_address_unchecked(Address::from_mut_ptr(
ptls.previous_exception,
))
});
}

// Scan backtrace buffer: See gc_queue_bt_buf in gc.c
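On the caller side, root scanning now pushes only slots that actually hold an object: `se.load()` yields `Option<ObjectReference>` and `None` (an empty slot) is skipped, replacing the old `is_null` check. The same filtering pattern, sketched with plain values:

```rust
// Caller-side of the Option-returning load: collect only the slots that hold
// an object, skipping empty ones instead of filtering out a NULL reference.
fn collect_roots(slots: &[usize]) -> Vec<usize> {
    slots
        .iter()
        .filter_map(|&raw| if raw == 0 { None } else { Some(raw) })
        .collect()
}

fn main() {
    let slots = [0x1000, 0, 0x2000, 0];
    assert_eq!(collect_roots(&slots), vec![0x1000, 0x2000]);
}
```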
