diff --git a/docs/userguide/src/tutorial/code/mygc_semispace/gc_work.rs b/docs/userguide/src/tutorial/code/mygc_semispace/gc_work.rs index 29be4f6184..f80ff56f9a 100644 --- a/docs/userguide/src/tutorial/code/mygc_semispace/gc_work.rs +++ b/docs/userguide/src/tutorial/code/mygc_semispace/gc_work.rs @@ -56,9 +56,7 @@ impl ProcessEdgesWork for MyGCProcessEdges { } fn trace_object(&mut self, object: ObjectReference) -> ObjectReference { - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); let worker = self.worker(); let queue = &mut self.base.nodes; if self.plan.tospace().in_space(object) { diff --git a/docs/userguide/src/tutorial/mygc/ss/exercise_solution.md b/docs/userguide/src/tutorial/mygc/ss/exercise_solution.md index 4f4717ed77..cb751aa082 100644 --- a/docs/userguide/src/tutorial/mygc/ss/exercise_solution.md +++ b/docs/userguide/src/tutorial/mygc/ss/exercise_solution.md @@ -149,9 +149,7 @@ In `gc_work.rs`: the tospace and fromspace: ```rust fn trace_object(&mut self, object: ObjectReference) -> ObjectReference { - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); // Add this! else if self.plan().youngspace().in_space(object) { diff --git a/src/plan/generational/gc_work.rs b/src/plan/generational/gc_work.rs index a5c38e84ea..637f21a34a 100644 --- a/src/plan/generational/gc_work.rs +++ b/src/plan/generational/gc_work.rs @@ -36,19 +36,21 @@ impl + PlanTraceObject> ProcessEdg Self { plan, base } } fn trace_object(&mut self, object: ObjectReference) -> ObjectReference { - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); + // We cannot borrow `self` twice in a call, so we extract `worker` as a local variable. 
let worker = self.worker(); self.plan .trace_object_nursery(&mut self.base.nodes, object, worker) } fn process_edge(&mut self, slot: EdgeOf) { - let object = slot.load(); - let new_object = self.trace_object(object); - debug_assert!(!self.plan.is_object_in_nursery(new_object)); - slot.store(new_object); + slot.update_for_forwarding(|object| { + debug_assert!(!object.is_null()); + let new_object = self.trace_object(object); + debug_assert!(!new_object.is_null()); + debug_assert!(!self.plan.is_object_in_nursery(new_object)); + new_object + }); } fn create_scan_work( diff --git a/src/policy/copyspace.rs b/src/policy/copyspace.rs index 8d08ec1507..8193c7c63c 100644 --- a/src/policy/copyspace.rs +++ b/src/policy/copyspace.rs @@ -217,6 +217,7 @@ impl CopySpace { worker: &mut GCWorker, ) -> ObjectReference { trace!("copyspace.trace_object(, {:?}, {:?})", object, semantics,); + debug_assert!(!object.is_null()); // If this is not from space, we do not need to trace it (the object has been copied to the tosapce) if !self.is_from_space() { diff --git a/src/policy/immix/immixspace.rs b/src/policy/immix/immixspace.rs index 3809f7bd24..bf72394c01 100644 --- a/src/policy/immix/immixspace.rs +++ b/src/policy/immix/immixspace.rs @@ -184,6 +184,7 @@ impl crate::policy::gc_work::PolicyTraceObject for ImmixSpace copy: Option, worker: &mut GCWorker, ) -> ObjectReference { + debug_assert!(!object.is_null()); if KIND == TRACE_KIND_TRANSITIVE_PIN { self.trace_object_without_moving(queue, object) } else if KIND == TRACE_KIND_DEFRAG { diff --git a/src/policy/immortalspace.rs b/src/policy/immortalspace.rs index 5eeebd58c9..1e4d19eefa 100644 --- a/src/policy/immortalspace.rs +++ b/src/policy/immortalspace.rs @@ -187,6 +187,7 @@ impl ImmortalSpace { queue: &mut Q, object: ObjectReference, ) -> ObjectReference { + debug_assert!(!object.is_null()); #[cfg(feature = "vo_bit")] debug_assert!( crate::util::metadata::vo_bit::is_vo_bit_set::(object), diff --git a/src/policy/largeobjectspace.rs 
b/src/policy/largeobjectspace.rs index ec6b2f7506..64a13c8f37 100644 --- a/src/policy/largeobjectspace.rs +++ b/src/policy/largeobjectspace.rs @@ -189,6 +189,7 @@ impl LargeObjectSpace { queue: &mut Q, object: ObjectReference, ) -> ObjectReference { + debug_assert!(!object.is_null()); #[cfg(feature = "vo_bit")] debug_assert!( crate::util::metadata::vo_bit::is_vo_bit_set::(object), diff --git a/src/policy/markcompactspace.rs b/src/policy/markcompactspace.rs index 693218b492..e6d332919e 100644 --- a/src/policy/markcompactspace.rs +++ b/src/policy/markcompactspace.rs @@ -130,6 +130,7 @@ impl crate::policy::gc_work::PolicyTraceObject for MarkCompac _copy: Option, _worker: &mut GCWorker, ) -> ObjectReference { + debug_assert!(!object.is_null()); debug_assert!( KIND != TRACE_KIND_TRANSITIVE_PIN, "MarkCompact does not support transitive pin trace." diff --git a/src/policy/marksweepspace/malloc_ms/global.rs b/src/policy/marksweepspace/malloc_ms/global.rs index 7454fbb287..8d42b74f0d 100644 --- a/src/policy/marksweepspace/malloc_ms/global.rs +++ b/src/policy/marksweepspace/malloc_ms/global.rs @@ -400,9 +400,7 @@ impl MallocSpace { queue: &mut Q, object: ObjectReference, ) -> ObjectReference { - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); assert!( self.in_space(object), diff --git a/src/policy/marksweepspace/native_ms/global.rs b/src/policy/marksweepspace/native_ms/global.rs index 8d8eae7d0e..a533fcde33 100644 --- a/src/policy/marksweepspace/native_ms/global.rs +++ b/src/policy/marksweepspace/native_ms/global.rs @@ -241,9 +241,7 @@ impl MarkSweepSpace { queue: &mut Q, object: ObjectReference, ) -> ObjectReference { - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); debug_assert!( self.in_space(object), "Cannot mark an object {} that was not alloced by free list allocator.", diff --git a/src/scheduler/gc_work.rs b/src/scheduler/gc_work.rs index 7d0add88a4..9375790f03 100644 --- a/src/scheduler/gc_work.rs +++ 
b/src/scheduler/gc_work.rs @@ -662,11 +662,16 @@ pub trait ProcessEdgesWork: /// Process an edge, including loading the object reference from the memory slot, /// trace the object and store back the new object reference if necessary. fn process_edge(&mut self, slot: EdgeOf) { - let object = slot.load(); - let new_object = self.trace_object(object); - if Self::OVERWRITE_REFERENCE { - slot.store(new_object); - } + slot.update_for_forwarding(|object| { + debug_assert!(!object.is_null()); + let new_object = self.trace_object(object); + debug_assert!(!new_object.is_null()); + if Self::OVERWRITE_REFERENCE { + new_object + } else { + ObjectReference::NULL + } + }); } /// Process all the edges in the work packet. @@ -721,9 +726,7 @@ impl ProcessEdgesWork for SFTProcessEdges { fn trace_object(&mut self, object: ObjectReference) -> ObjectReference { use crate::policy::sft::GCWorkerMutRef; - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); // Erase type parameter let worker = GCWorkerMutRef::new(self.worker()); @@ -997,9 +1000,7 @@ impl + Plan, const KIND: TraceKin } fn trace_object(&mut self, object: ObjectReference) -> ObjectReference { - if object.is_null() { - return object; - } + debug_assert!(!object.is_null()); // We cannot borrow `self` twice in a call, so we extract `worker` as a local variable. 
let worker = self.worker(); self.plan @@ -1007,11 +1008,16 @@ } fn process_edge(&mut self, slot: EdgeOf) { - let object = slot.load(); - let new_object = self.trace_object(object); - if P::may_move_objects::() { - slot.store(new_object); - } + slot.update_for_forwarding(|object| { + debug_assert!(!object.is_null()); + let new_object = self.trace_object(object); + debug_assert!(!new_object.is_null()); + if P::may_move_objects::() { + new_object + } else { + ObjectReference::NULL + } + }); } } diff --git a/src/vm/edge_shape.rs b/src/vm/edge_shape.rs index dd23efd393..5bcb93fb0d 100644 --- a/src/vm/edge_shape.rs +++ b/src/vm/edge_shape.rs @@ -41,11 +41,72 @@ use crate::util::{Address, ObjectReference}; /// in a word as a pointer, it can also use `SimpleEdge` for weak reference fields. pub trait Edge: Copy + Send + Debug + PartialEq + Eq + Hash { /// Load object reference from the edge. + /// + /// If the slot is not holding an object reference, it should return `ObjectReference::NULL`. + /// Specifically, + /// + /// - If the language has the concept of "null pointer" which does not point to any object in + /// the heap, this method should return `ObjectReference::NULL` regardless of how a null + /// pointer is encoded in the VM. However, if the VM uses a special object in the heap to + /// represent a null value, such as the `None` object of `NoneType` in Python, this method + /// should still return the object reference to such `None` objects so that they are + /// properly traced, kept alive, and they have their references forwarded. + /// - If, in a VM, the data type a slot can hold is a union of references and non-reference + /// values, and the slot is currently holding a non-reference value, such as a small + /// integer, a floating-point number, or any special value such as `true`, `false` or `nil` + /// that do not point to any object, the slot is considered not holding a reference. 
This + /// method should return `ObjectReference::NULL` in such cases. + /// + /// If the slot holds an object reference with tag bits, the returned value shall be the object + /// reference with the tag bits removed. fn load(&self) -> ObjectReference; /// Store the object reference `object` into the edge. + /// + /// FIXME: Currently the subsuming write barrier (`Barrier::object_reference_write`) calls this + /// method to perform the actual store to the field. It only works if the VM does not store + /// tag bits in the slot. + /// + /// FIXME: If the slot contains tag bits, consider overriding the `update_for_forwarding` + /// method. See: https://github.com/mmtk/mmtk-core/issues/1033 fn store(&self, object: ObjectReference); + /// Update the slot for forwarding. + /// + /// If the slot is holding an object reference, this method shall call `updater` with that + /// reference; if the slot is not holding an object reference, including when holding a NULL + /// pointer or holding small integers or special non-reference values such as `true`, `false` + /// or `nil`, this method does not need to take further action. In no circumstance should this + /// method pass `ObjectReference::NULL` to `updater`. + /// + /// If the returned value of the `updater` closure is not `ObjectReference::NULL`, it will be + /// the forwarded object reference of the original object, and this method shall update the + /// slot to point to the new location; if the returned value is `ObjectReference::NULL`, this + /// method does not need to take further action. + /// + /// This method is called to trace the object pointed by this slot, and forward the reference + /// in the slot. To implement this semantics, the VM should usually preserve the tag bits if + /// it uses tagged pointers to indicate whether the slot holds an object reference or + /// non-reference values. 
+ /// + /// The default implementation calls `self.load()` to load the object reference, and calls + /// `self.store` to store the updated reference. VMs that use tagged pointers should override + /// this method to preserve the tag bits between the load and store. + fn update_for_forwarding(&self, updater: F) + where + F: FnOnce(ObjectReference) -> ObjectReference, + { + let object = self.load(); + if object.is_null() { + return; + } + let new_object = updater(object); + if new_object.is_null() { + return; + } + self.store(new_object); + } + /// Prefetch the edge so that a subsequent `load` will be faster. fn prefetch_load(&self) { // no-op by default