Remove NULL ObjectReference #1064

Merged
merged 16 commits on Apr 27, 2024
1 change: 0 additions & 1 deletion docs/userguide/src/tutorial/code/mygc_semispace/gc_work.rs
@@ -56,7 +56,6 @@ impl<VM: VMBinding> ProcessEdgesWork for MyGCProcessEdges<VM> {
}

fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());
let worker = self.worker();
let queue = &mut self.base.nodes;
if self.plan.tospace().in_space(object) {
2 changes: 0 additions & 2 deletions docs/userguide/src/tutorial/mygc/ss/exercise_solution.md
@@ -149,8 +149,6 @@ In `gc_work.rs`:
the tospace and fromspace:
```rust
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());

// Add this!
else if self.plan().youngspace().in_space(object) {
self.plan().youngspace.trace_object::<Self, TripleSpaceCopyContext<VM>>(
3 changes: 0 additions & 3 deletions src/memory_manager.rs
@@ -638,9 +638,6 @@ pub fn is_mmtk_object(addr: Address) -> bool {
/// * `object`: The object reference to query.
pub fn is_in_mmtk_spaces<VM: VMBinding>(object: ObjectReference) -> bool {
use crate::mmtk::SFT_MAP;
if object.is_null() {
return false;
}
SFT_MAP
.get_checked(object.to_address::<VM>())
.is_in_space(object)
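A hedged sketch of how a binding-side query might adapt to this change: since `is_in_mmtk_spaces` no longer accepts a null reference, the caller converts the raw word to `Option<ObjectReference>` first. The helper name `object_is_managed` is hypothetical.

```rust
use mmtk::memory_manager;
use mmtk::util::{Address, ObjectReference};
use mmtk::vm::VMBinding;

/// Hypothetical binding-side helper: a raw (possibly zero) word is only queried
/// against MMTk's spaces when it actually holds an object reference.
fn object_is_managed<VM: VMBinding>(raw: Address) -> bool {
    match ObjectReference::from_raw_address(raw) {
        Some(obj) => memory_manager::is_in_mmtk_spaces::<VM>(obj),
        None => false, // a null word is never in an MMTk space
    }
}
```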
8 changes: 3 additions & 5 deletions src/plan/generational/gc_work.rs
@@ -43,8 +43,6 @@ impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>, const KIND
}

fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());

// We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
let worker = self.worker();
self.plan.trace_object_nursery::<VectorObjectQueue, KIND>(
@@ -55,10 +53,10 @@
}

fn process_edge(&mut self, slot: EdgeOf<Self>) {
let object = slot.load();
if object.is_null() {
let Some(object) = slot.load() else {
// Skip slots that are not holding an object reference.
return;
}
};
let new_object = self.trace_object(object);
debug_assert!(!self.plan.is_object_in_nursery(new_object));
// Note: If `object` is a mature object, `trace_object` will not call `space.trace_object`,
2 changes: 1 addition & 1 deletion src/plan/global.rs
@@ -691,7 +691,7 @@ pub trait PlanTraceObject<VM: VMBinding> {
///
/// Arguments:
/// * `trace`: the current transitive closure
/// * `object`: the object to trace. This is a non-nullable object reference.
/// * `object`: the object to trace.
/// * `worker`: the GC worker that is tracing this object.
fn trace_object<Q: ObjectQueue, const KIND: TraceKind>(
&self,
2 changes: 1 addition & 1 deletion src/plan/tracing.rs
@@ -117,7 +117,7 @@ impl<'a, E: ProcessEdgesWork> EdgeVisitor<EdgeOf<E>> for ObjectsClosure<'a, E> {
{
use crate::vm::edge_shape::Edge;
trace!(
"(ObjectsClosure) Visit edge {:?} (pointing to {})",
"(ObjectsClosure) Visit edge {:?} (pointing to {:?})",
slot,
slot.load()
);
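The switch from `{}` to `{:?}` is presumably needed because `slot.load()` now yields an `Option<ObjectReference>`, which implements `Debug` but not `Display`. A standalone sketch of the distinction:

```rust
fn main() {
    let maybe: Option<usize> = Some(0x1000); // stand-in for Option<ObjectReference>
    println!("{:?}", maybe); // Debug formatting works for Option
    // println!("{}", maybe); // would not compile: Option has no Display impl
}
```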
1 change: 0 additions & 1 deletion src/policy/copyspace.rs
@@ -204,7 +204,6 @@ impl<VM: VMBinding> CopySpace<VM> {
worker: &mut GCWorker<VM>,
) -> ObjectReference {
trace!("copyspace.trace_object(, {:?}, {:?})", object, semantics,);
debug_assert!(!object.is_null());

// If this is not from space, we do not need to trace it (the object has been copied to the tospace)
if !self.is_from_space() {
1 change: 0 additions & 1 deletion src/policy/immix/immixspace.rs
@@ -184,7 +184,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for ImmixSpace
copy: Option<CopySemantics>,
worker: &mut GCWorker<VM>,
) -> ObjectReference {
debug_assert!(!object.is_null());
if KIND == TRACE_KIND_TRANSITIVE_PIN {
self.trace_object_without_moving(queue, object)
} else if KIND == TRACE_KIND_DEFRAG {
1 change: 0 additions & 1 deletion src/policy/immortalspace.rs
@@ -187,7 +187,6 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());
#[cfg(feature = "vo_bit")]
debug_assert!(
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
1 change: 0 additions & 1 deletion src/policy/largeobjectspace.rs
@@ -189,7 +189,6 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());
#[cfg(feature = "vo_bit")]
debug_assert!(
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
30 changes: 11 additions & 19 deletions src/policy/markcompactspace.rs
@@ -37,12 +37,7 @@ impl<VM: VMBinding> SFT for MarkCompactSpace<VM> {
}

fn get_forwarded_object(&self, object: ObjectReference) -> Option<ObjectReference> {
let forwarding_pointer = Self::get_header_forwarding_pointer(object);
if forwarding_pointer.is_null() {
None
} else {
Some(forwarding_pointer)
}
Self::get_header_forwarding_pointer(object)
}

fn is_live(&self, object: ObjectReference) -> bool {
@@ -130,7 +125,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for MarkCompac
_copy: Option<CopySemantics>,
_worker: &mut GCWorker<VM>,
) -> ObjectReference {
debug_assert!(!object.is_null());
debug_assert!(
KIND != TRACE_KIND_TRANSITIVE_PIN,
"MarkCompact does not support transitive pin trace."
@@ -177,8 +171,9 @@
}

/// Get header forwarding pointer for an object
fn get_header_forwarding_pointer(object: ObjectReference) -> ObjectReference {
unsafe { Self::header_forwarding_pointer_address(object).load::<ObjectReference>() }
fn get_header_forwarding_pointer(object: ObjectReference) -> Option<ObjectReference> {
let addr = unsafe { Self::header_forwarding_pointer_address(object).load::<Address>() };
ObjectReference::from_raw_address(addr)
}

/// Store header forwarding pointer for an object
@@ -251,12 +246,8 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
queue.enqueue(object);
}

let result = Self::get_header_forwarding_pointer(object);
debug_assert!(
!result.is_null(),
"Object {object} does not have a forwarding pointer"
);
result
Self::get_header_forwarding_pointer(object)
.unwrap_or_else(|| panic!("Object {object} does not have a forwarding pointer"))
}

pub fn test_and_mark(object: ObjectReference) -> bool {
@@ -393,10 +384,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
// clear the VO bit
vo_bit::unset_vo_bit::<VM>(obj);

let forwarding_pointer = Self::get_header_forwarding_pointer(obj);

trace!("Compact {} to {}", obj, forwarding_pointer);
if !forwarding_pointer.is_null() {
let maybe_forwarding_pointer = Self::get_header_forwarding_pointer(obj);
if let Some(forwarding_pointer) = maybe_forwarding_pointer {
trace!("Compact {} to {}", obj, forwarding_pointer);
let new_object = forwarding_pointer;
Self::clear_header_forwarding_pointer(new_object);

@@ -408,6 +398,8 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
vo_bit::set_vo_bit::<VM>(new_object);
to = new_object.to_object_start::<VM>() + copied_size;
debug_assert_eq!(end_of_new_object, to);
} else {
trace!("Skipping dead object {}", obj);
}
}
}
2 changes: 0 additions & 2 deletions src/policy/marksweepspace/malloc_ms/global.rs
@@ -400,8 +400,6 @@ impl<VM: VMBinding> MallocSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());

assert!(
self.in_space(object),
"Cannot mark an object {} that was not alloced by malloc.",
12 changes: 8 additions & 4 deletions src/policy/marksweepspace/native_ms/block.rs
@@ -280,7 +280,8 @@ impl Block {
while cell + cell_size <= self.start() + Block::BYTES {
// The invariants we checked earlier ensure that we can use cell and object reference interchangeably
// We may not really have an object in this cell, but if we do, this object reference is correct.
let potential_object = ObjectReference::from_raw_address(cell);
// About unsafe: We know `cell` is non-zero here.
let potential_object = unsafe { ObjectReference::from_raw_address_unchecked(cell) };

if !VM::VMObjectModel::LOCAL_MARK_BIT_SPEC
.is_marked::<VM>(potential_object, Ordering::SeqCst)
@@ -320,9 +321,12 @@ impl Block {

while cell + cell_size <= self.end() {
// possible object ref
let potential_object_ref = ObjectReference::from_raw_address(
cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
);
let potential_object_ref = unsafe {
// We know cursor plus an offset cannot be 0.
ObjectReference::from_raw_address_unchecked(
cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
)
};
trace!(
"{:?}: cell = {}, last cell in free list = {}, cursor = {}, potential object = {}",
self,
1 change: 0 additions & 1 deletion src/policy/marksweepspace/native_ms/global.rs
@@ -291,7 +291,6 @@ impl<VM: VMBinding> MarkSweepSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());
debug_assert!(
self.in_space(object),
"Cannot mark an object {} that was not alloced by free list allocator.",
18 changes: 6 additions & 12 deletions src/scheduler/gc_work.rs
@@ -223,7 +223,6 @@ impl<E: ProcessEdgesWork> ObjectTracer for ProcessEdgesWorkTracer<E> {
/// Forward the `trace_object` call to the underlying `ProcessEdgesWork`,
/// and flush as soon as the underlying buffer of `process_edges_work` is full.
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());
let result = self.process_edges_work.trace_object(object);
self.flush_if_full();
result
@@ -604,12 +603,11 @@ pub trait ProcessEdgesWork:
/// Process an edge, including loading the object reference from the memory slot,
/// tracing the object and storing back the new object reference if necessary.
fn process_edge(&mut self, slot: EdgeOf<Self>) {
let object = slot.load();
if object.is_null() {
let Some(object) = slot.load() else {
// Skip slots that are not holding an object reference.
return;
}
};
let new_object = self.trace_object(object);
debug_assert!(!new_object.is_null());
if Self::OVERWRITE_REFERENCE && new_object != object {
slot.store(new_object);
}
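A minimal standalone sketch (not mmtk-core's actual `Edge` trait) of the let-else pattern adopted here: the slot's `load` reports an empty slot as `None`, and `process_edge` returns early. `Slot` and `Obj` are hypothetical stand-ins.

```rust
use std::num::NonZeroUsize;

#[derive(Clone, Copy, Debug)]
struct Obj(NonZeroUsize); // stand-in for the non-nullable ObjectReference

struct Slot(*mut usize); // stand-in for a heap slot holding one word

impl Slot {
    /// Returns `None` when the slot holds 0, i.e. no object reference.
    fn load(&self) -> Option<Obj> {
        NonZeroUsize::new(unsafe { self.0.read() }).map(Obj)
    }
}

fn process_edge(slot: &Slot) {
    let Some(object) = slot.load() else {
        return; // skip slots that are not holding an object reference
    };
    println!("tracing {:?}", object); // the real code calls trace_object(object) here
}

fn main() {
    let mut empty: usize = 0;
    let mut full: usize = 0x1000;
    process_edge(&Slot(std::ptr::addr_of_mut!(empty))); // empty slot: skipped
    process_edge(&Slot(std::ptr::addr_of_mut!(full))); // holds a (fake) reference
}
```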
@@ -667,8 +665,6 @@ impl<VM: VMBinding> ProcessEdgesWork for SFTProcessEdges<VM> {
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
use crate::policy::sft::GCWorkerMutRef;

debug_assert!(!object.is_null());

// Erase <VM> type parameter
let worker = GCWorkerMutRef::new(self.worker());

@@ -925,20 +921,18 @@ impl<VM: VMBinding, P: PlanTraceObject<VM> + Plan<VM = VM>, const KIND: TraceKin
}

fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());
// We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
let worker = self.worker();
self.plan
.trace_object::<VectorObjectQueue, KIND>(&mut self.base.nodes, object, worker)
}

fn process_edge(&mut self, slot: EdgeOf<Self>) {
let object = slot.load();
if object.is_null() {
let Some(object) = slot.load() else {
// Skip slots that are not holding an object reference.
return;
}
};
let new_object = self.trace_object(object);
debug_assert!(!new_object.is_null());
if P::may_move_objects::<KIND>() && new_object != object {
slot.store(new_object);
}
54 changes: 32 additions & 22 deletions src/util/address.rs
@@ -3,6 +3,7 @@ use bytemuck::NoUninit;

use std::fmt;
use std::mem;
use std::num::NonZeroUsize;
use std::ops::*;
use std::sync::atomic::Ordering;

@@ -477,30 +478,52 @@ use crate::vm::VMBinding;
/// their layout. We now only allow a binding to define their semantics through a set of
/// methods in [`crate::vm::ObjectModel`]. Major refactoring is needed in MMTk to allow
/// the opaque `ObjectReference` type, and we haven't seen a use case for now.
///
/// Note that [`ObjectReference`] cannot be null. For the cases where a non-null object reference
/// may or may not exist, (such as the result of [`crate::vm::edge_shape::Edge::load`])
/// `Option<ObjectReference>` should be used. [`ObjectReference`] is backed by `NonZeroUsize`
/// which cannot be zero, and it has the `#[repr(transparent)]` attribute. Thanks to [null pointer
/// optimization (NPO)][NPO], `Option<ObjectReference>` has the same size as `NonZeroUsize` and
/// `usize`. For the convenience of passing `Option<ObjectReference>` to and from native (C/C++)
/// programs, mmtk-core provides [`crate::util::api_util::NullableObjectReference`].
///
/// [NPO]: https://doc.rust-lang.org/std/option/index.html#representation
#[repr(transparent)]
#[derive(Copy, Clone, Eq, Hash, PartialOrd, Ord, PartialEq, NoUninit)]
pub struct ObjectReference(usize);
pub struct ObjectReference(NonZeroUsize);
Reviewer comment (Member): The comments for the type should note that ObjectReference cannot be null, and for places where we may have an invalid object reference, we use Option<ObjectReference>. NonZeroUsize makes sure that Option<ObjectReference> has the same length as ObjectReference.
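A small self-contained sketch of the property this relies on, using a stand-in type rather than the real `ObjectReference`: a `#[repr(transparent)]` newtype over `NonZeroUsize` gets the null pointer optimization, so wrapping it in `Option` adds no space.

```rust
use std::mem::size_of;
use std::num::NonZeroUsize;

#[repr(transparent)]
#[derive(Copy, Clone)]
struct ObjRef(NonZeroUsize); // stand-in for ObjectReference

fn main() {
    // `None` reuses the forbidden all-zero bit pattern, so the Option costs nothing extra.
    assert_eq!(size_of::<ObjRef>(), size_of::<usize>());
    assert_eq!(size_of::<Option<ObjRef>>(), size_of::<usize>());
}
```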


impl ObjectReference {
/// The null object reference, represented as zero.
pub const NULL: ObjectReference = ObjectReference(0);

/// Cast the object reference to its raw address. This method is mostly for the convenience of a binding.
///
/// MMTk should not make any assumption on the actual location of the address with the object reference.
/// MMTk should not assume the address returned by this method is in our allocation. For the purposes of
/// setting object metadata, MMTk should use [`crate::vm::ObjectModel::ref_to_address()`] or [`crate::vm::ObjectModel::ref_to_header()`].
pub fn to_raw_address(self) -> Address {
Address(self.0)
Address(self.0.get())
}

/// Cast a raw address to an object reference. This method is mostly for the convenience of a binding.
/// This is how a binding creates `ObjectReference` instances.
///
/// If `addr` is 0, the result is `None`.
///
/// MMTk should not assume an arbitrary address can be turned into an object reference. MMTk can use [`crate::vm::ObjectModel::address_to_ref()`]
/// to turn addresses that are from [`crate::vm::ObjectModel::ref_to_address()`] back to object.
pub fn from_raw_address(addr: Address) -> ObjectReference {
ObjectReference(addr.0)
pub fn from_raw_address(addr: Address) -> Option<ObjectReference> {
NonZeroUsize::new(addr.0).map(ObjectReference)
}

/// Like `from_raw_address`, but assume `addr` is not zero. This can be used to elide a check
/// against zero for performance-critical code.
///
/// # Safety
///
/// This method assumes `addr` is not zero. It should only be used in cases where we know at
/// compile time that the input cannot be zero. For example, if we compute the address by
/// adding a positive offset to a non-zero address, we know the result must not be zero.
pub unsafe fn from_raw_address_unchecked(addr: Address) -> ObjectReference {
debug_assert!(!addr.is_zero());
ObjectReference(NonZeroUsize::new_unchecked(addr.0))
}
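A hedged usage sketch of the two conversions defined above, as a binding might call them; the function and its `raw` parameter are illustrative only.

```rust
use mmtk::util::{Address, ObjectReference};

fn inspect_word(raw: Address) {
    // Checked: a zero address becomes `None`.
    if let Some(obj) = ObjectReference::from_raw_address(raw) {
        println!("word holds {}", obj);
    }

    // Unchecked: only sound when the address is known to be non-zero,
    // e.g. a non-zero base address plus a positive offset.
    if !raw.is_zero() {
        let obj = unsafe { ObjectReference::from_raw_address_unchecked(raw) };
        let _ = obj;
    }
}
```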

/// Get the in-heap address from an object reference. This method is used by MMTk to get an in-heap address
Expand Down Expand Up @@ -541,28 +564,15 @@ impl ObjectReference {
obj
}

/// is this object reference null reference?
pub fn is_null(self) -> bool {
self.0 == 0
}

/// Is the object reachable, determined by the policy?
/// Note: Objects in ImmortalSpace may have `is_live = true` but are actually unreachable.
pub fn is_reachable<VM: VMBinding>(self) -> bool {
if self.is_null() {
false
} else {
unsafe { SFT_MAP.get_unchecked(self.to_address::<VM>()) }.is_reachable(self)
}
unsafe { SFT_MAP.get_unchecked(self.to_address::<VM>()) }.is_reachable(self)
}

/// Is the object live, determined by the policy?
pub fn is_live<VM: VMBinding>(self) -> bool {
if self.0 == 0 {
false
} else {
unsafe { SFT_MAP.get_unchecked(self.to_address::<VM>()) }.is_live(self)
}
unsafe { SFT_MAP.get_unchecked(self.to_address::<VM>()) }.is_live(self)
}

/// Can the object be moved?
12 changes: 7 additions & 5 deletions src/util/alloc/free_list_allocator.rs
@@ -370,6 +370,8 @@ impl<VM: VMBinding> FreeListAllocator<VM> {

#[cfg(feature = "malloc_native_mimalloc")]
fn free(&self, addr: Address) {
assert!(!addr.is_zero(), "Attempted to free zero address.");

use crate::util::ObjectReference;
let block = Block::from_unaligned_address(addr);
let block_tls = block.load_tls();
@@ -402,11 +404,11 @@ impl<VM: VMBinding> FreeListAllocator<VM> {
}

// unset allocation bit
unsafe {
crate::util::metadata::vo_bit::unset_vo_bit_unsafe::<VM>(
ObjectReference::from_raw_address(addr),
)
};
// Note: We cannot use `unset_vo_bit_unsafe` because two threads may attempt to free
// objects at adjacent addresses, and they may share the same byte in the VO bit metadata.
crate::util::metadata::vo_bit::unset_vo_bit::<VM>(unsafe {
ObjectReference::from_raw_address_unchecked(addr)
})
}

fn store_block_tls(&self, block: Block) {
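A standalone sketch (not the actual `vo_bit` metadata code) of why `free` above uses the atomic variant: VO bits for adjacent objects can share one metadata byte, so a non-atomic read-modify-write from two freeing threads could lose an update, while `fetch_and` keeps each clear indivisible.

```rust
use std::sync::atomic::{AtomicU8, Ordering};

/// Clear one bit of a shared metadata byte atomically.
fn unset_bit_atomic(byte: &AtomicU8, bit: u8) {
    byte.fetch_and(!(1u8 << bit), Ordering::SeqCst);
}

fn main() {
    // Pretend bits 0..8 are the VO bits of eight neighbouring objects.
    let byte = AtomicU8::new(0b1111_1111);
    unset_bit_atomic(&byte, 0); // thread A frees object 0
    unset_bit_atomic(&byte, 3); // thread B frees object 3
    assert_eq!(byte.load(Ordering::SeqCst), 0b1111_0110);
}
```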