Refactor object reference to address (mmtk#699)
This PR removes most of our uses of `ObjectReference::to_address()` and replaces them with `ObjectModel::ref_to_address()`. With this change, the address we get from an object reference is always guaranteed to be within the allocation. Thus the changes in mmtk#555 are reverted in this PR.

This PR also addresses the comments raised in mmtk#643 (comment).

Changes:
* Replace our uses of `ObjectReference::to_address()` with `ObjectModel::ref_to_address()`.
* Changes to `ObjectReference`:
  * Rename `ObjectReference::to_address()` to `ObjectReference::to_raw_address()`, and make it clear that we should avoid using it.
  * Remove `Address::to_object_reference()`, and add `ObjectReference::from_raw_address()`. Make it clear that we should avoid using it.
* Changes to `ObjectModel`:
  * Add `address_to_ref`, which does the opposite of `ref_to_address`.
  * Add `ref_to_header`.
  * Rename `object_start_ref` to `ref_to_object_start`, to make it consistent with the other methods.
* Change `Treadmill` to store `ObjectReference` instead of `Address`. We previously stored the object start address in `Treadmill` and assumed the alloc bit was set on the object start address. With the changes in this PR, the alloc bit is no longer set on the object start address, so I updated `Treadmill` accordingly.
* Remove the 'object ref guard' code, which dealt with the case where an object reference (address) may not be in the same chunk as the allocation. That can no longer happen.
* `alloc_bit::is_alloced_object(address)` now returns an `Option<ObjectReference>`. We may consider returning `Option<ObjectReference>` from our API `is_mmtk_object()` as well, but I did not include that change in this PR. A sketch of the new conversions follows this list.
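
The sketch below illustrates the conversions described above. It is a minimal sketch written as if inside mmtk-core (for instance, `alloc_bit` is an internal module), and the three wrapper functions are hypothetical names used for illustration, not part of this PR:

```rust
use crate::util::{Address, ObjectReference};
use crate::vm::{ObjectModel, VMBinding};

/// Hypothetical helper: get an address for an object. After this PR, the
/// address returned by `ref_to_address` is guaranteed to be within the
/// allocation, which the old `ObjectReference::to_address()` did not guarantee.
fn in_object_address<VM: VMBinding>(object: ObjectReference) -> Address {
    VM::VMObjectModel::ref_to_address(object)
}

/// Hypothetical helper: the inverse conversion, added by this PR as
/// `ObjectModel::address_to_ref`.
fn object_from_in_object_address<VM: VMBinding>(addr: Address) -> ObjectReference {
    VM::VMObjectModel::address_to_ref(addr)
}

/// Hypothetical helper: query the alloc bit at an arbitrary address.
/// The query now yields the object reference itself rather than a bool.
fn object_at_address<VM: VMBinding>(addr: Address) -> Option<ObjectReference> {
    crate::util::alloc_bit::is_alloced_object::<VM>(addr)
}
```

The raw conversions, `ObjectReference::to_raw_address()` and `ObjectReference::from_raw_address()`, remain available for the rare cases that need them, but as noted above they should be avoided.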
qinsoon authored and wenyuzhao committed Mar 20, 2023
1 parent 664d16b commit 40e3e2b
Showing 40 changed files with 415 additions and 490 deletions.
src/memory_manager.rs (7 additions & 2 deletions)
@@ -646,10 +646,15 @@ pub fn is_mmtk_object(addr: Address) -> bool {
 ///
 /// Arguments:
 /// * `object`: The object reference to query.
-pub fn is_in_mmtk_spaces(object: ObjectReference) -> bool {
+pub fn is_in_mmtk_spaces<VM: VMBinding>(object: ObjectReference) -> bool {
     use crate::mmtk::SFT_MAP;
     use crate::policy::sft_map::SFTMap;
-    SFT_MAP.get_checked(object.to_address()).is_in_space(object)
+    if object.is_null() {
+        return false;
+    }
+    SFT_MAP
+        .get_checked(object.to_address::<VM>())
+        .is_in_space(object)
 }
 
 /// Is the address in the mapped memory? The runtime can use this function to check
src/policy/copyspace.rs (8 additions & 2 deletions)
@@ -47,7 +47,7 @@ impl<VM: VMBinding> SFT for CopySpace<VM> {

     fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
         #[cfg(feature = "global_alloc_bit")]
-        crate::util::alloc_bit::set_alloc_bit(_object);
+        crate::util::alloc_bit::set_alloc_bit::<VM>(_object);
     }
 
     #[inline(always)]
@@ -63,6 +63,12 @@ impl<VM: VMBinding> SFT for CopySpace<VM> {
         }
     }
 
+    #[cfg(feature = "is_mmtk_object")]
+    #[inline(always)]
+    fn is_mmtk_object(&self, addr: Address) -> bool {
+        crate::util::alloc_bit::is_alloced_object::<VM>(addr).is_some()
+    }
+
     #[inline(always)]
     fn sft_trace_object(
         &self,
@@ -231,7 +237,7 @@ impl<VM: VMBinding> CopySpace<VM> {

     #[cfg(feature = "global_alloc_bit")]
     debug_assert!(
-        crate::util::alloc_bit::is_alloced(object),
+        crate::util::alloc_bit::is_alloced::<VM>(object),
         "{:x}: alloc bit not set",
         object
     );
src/policy/immix/immixspace.rs (8 additions & 3 deletions)
@@ -77,7 +77,12 @@ impl<VM: VMBinding> SFT for ImmixSpace<VM> {
     }
     fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
         #[cfg(feature = "global_alloc_bit")]
-        crate::util::alloc_bit::set_alloc_bit(_object);
+        crate::util::alloc_bit::set_alloc_bit::<VM>(_object);
     }
+    #[cfg(feature = "is_mmtk_object")]
+    #[inline(always)]
+    fn is_mmtk_object(&self, addr: Address) -> bool {
+        crate::util::alloc_bit::is_alloced_object::<VM>(addr).is_some()
+    }
     #[inline(always)]
     fn sft_trace_object(
@@ -390,7 +395,7 @@ impl<VM: VMBinding> ImmixSpace<VM> {
     ) -> ObjectReference {
         #[cfg(feature = "global_alloc_bit")]
         debug_assert!(
-            crate::util::alloc_bit::is_alloced(object),
+            crate::util::alloc_bit::is_alloced::<VM>(object),
             "{:x}: alloc bit not set",
             object
         );
@@ -483,7 +488,7 @@ impl<VM: VMBinding> ImmixSpace<VM> {
             object
         } else {
             #[cfg(feature = "global_alloc_bit")]
-            crate::util::alloc_bit::unset_alloc_bit(object);
+            crate::util::alloc_bit::unset_alloc_bit::<VM>(object);
             ForwardingWord::forward_object::<VM>(object, semantics, copy_context)
         };
         debug_assert_eq!(
src/policy/immix/line.rs (1 addition & 1 deletion)
@@ -71,7 +71,7 @@ impl Line {
     #[inline]
     pub fn mark_lines_for_object<VM: VMBinding>(object: ObjectReference, state: u8) -> usize {
         debug_assert!(!super::BLOCK_ONLY);
-        let start = VM::VMObjectModel::object_start_ref(object);
+        let start = VM::VMObjectModel::ref_to_object_start(object);
         let end = start + VM::VMObjectModel::get_current_size(object);
         let start_line = Line::from_unaligned_address(start);
         let mut end_line = Line::from_unaligned_address(end);
src/policy/immortalspace.rs (7 additions & 2 deletions)
@@ -70,7 +70,12 @@ impl<VM: VMBinding> SFT for ImmortalSpace<VM> {
             VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
         }
         #[cfg(feature = "global_alloc_bit")]
-        crate::util::alloc_bit::set_alloc_bit(object);
+        crate::util::alloc_bit::set_alloc_bit::<VM>(object);
     }
+    #[cfg(feature = "is_mmtk_object")]
+    #[inline(always)]
+    fn is_mmtk_object(&self, addr: Address) -> bool {
+        crate::util::alloc_bit::is_alloced_object::<VM>(addr).is_some()
+    }
     #[inline(always)]
     fn sft_trace_object(
@@ -209,7 +214,7 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
     ) -> ObjectReference {
         #[cfg(feature = "global_alloc_bit")]
         debug_assert!(
-            crate::util::alloc_bit::is_alloced(object),
+            crate::util::alloc_bit::is_alloced::<VM>(object),
             "{:x}: alloc bit not set",
             object
         );
src/policy/largeobjectspace.rs (21 additions & 19 deletions)
@@ -73,9 +73,13 @@ impl<VM: VMBinding> SFT for LargeObjectSpace<VM> {
         }
 
         #[cfg(feature = "global_alloc_bit")]
-        crate::util::alloc_bit::set_alloc_bit(object);
-        let cell = VM::VMObjectModel::object_start_ref(object);
-        self.treadmill.add_to_treadmill(cell, alloc);
+        crate::util::alloc_bit::set_alloc_bit::<VM>(object);
+        self.treadmill.add_to_treadmill(object, alloc);
     }
+    #[cfg(feature = "is_mmtk_object")]
+    #[inline(always)]
+    fn is_mmtk_object(&self, addr: Address) -> bool {
+        crate::util::alloc_bit::is_alloced_object::<VM>(addr).is_some()
+    }
     #[inline(always)]
     fn sft_trace_object(
@@ -205,16 +209,15 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
     ) -> ObjectReference {
         #[cfg(feature = "global_alloc_bit")]
         debug_assert!(
-            crate::util::alloc_bit::is_alloced(object),
+            crate::util::alloc_bit::is_alloced::<VM>(object),
             "{:x}: alloc bit not set",
             object
         );
         let nursery_object = self.is_in_nursery(object);
         if !self.in_nursery_gc || nursery_object {
             // Note that test_and_mark() has side effects
             if self.test_and_mark(object, self.mark_state) {
-                let cell = VM::VMObjectModel::object_start_ref(object);
-                self.treadmill.copy(cell, nursery_object);
+                self.treadmill.copy(object, nursery_object);
                 self.clear_nursery(object);
                 // We just moved the object out of the logical nursery, mark it as unlogged.
                 if nursery_object && self.common.needs_log_bit {
@@ -228,22 +231,21 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
     }
 
     fn sweep_large_pages(&mut self, sweep_nursery: bool) {
-        // FIXME: borrow checker fighting
-        // didn't call self.release_multiple_pages
-        // so the compiler knows I'm borrowing two different fields
+        let sweep = |object: ObjectReference| {
+            #[cfg(feature = "global_alloc_bit")]
+            crate::util::alloc_bit::unset_alloc_bit::<VM>(object);
+            self.pr
+                .release_pages(get_super_page(VM::VMObjectModel::ref_to_object_start(
+                    object,
+                )));
+        };
         if sweep_nursery {
-            for cell in self.treadmill.collect_nursery() {
-                // println!("- cn {}", cell);
-                #[cfg(feature = "global_alloc_bit")]
-                crate::util::alloc_bit::unset_addr_alloc_bit(cell);
-                self.pr.release_pages(get_super_page(cell));
+            for object in self.treadmill.collect_nursery() {
+                sweep(object);
             }
         } else {
-            for cell in self.treadmill.collect() {
-                // println!("- ts {}", cell);
-                #[cfg(feature = "global_alloc_bit")]
-                crate::util::alloc_bit::unset_addr_alloc_bit(cell);
-                self.pr.release_pages(get_super_page(cell));
+            for object in self.treadmill.collect() {
+                sweep(object)
             }
         }
     }
src/policy/lockfreeimmortalspace.rs (6 additions & 1 deletion)
@@ -59,7 +59,12 @@ impl<VM: VMBinding> SFT for LockFreeImmortalSpace<VM> {
     }
     fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
         #[cfg(feature = "global_alloc_bit")]
-        crate::util::alloc_bit::set_alloc_bit(_object);
+        crate::util::alloc_bit::set_alloc_bit::<VM>(_object);
     }
+    #[cfg(feature = "is_mmtk_object")]
+    #[inline(always)]
+    fn is_mmtk_object(&self, addr: Address) -> bool {
+        crate::util::alloc_bit::is_alloced_object::<VM>(addr).is_some()
+    }
     fn sft_trace_object(
         &self,
src/policy/mallocspace/global.rs (12 additions & 11 deletions)
@@ -74,7 +74,7 @@ impl<VM: VMBinding> SFT for MallocSpace<VM> {

     // For malloc space, we need to further check the alloc bit.
     fn is_in_space(&self, object: ObjectReference) -> bool {
-        is_alloced_by_malloc(object)
+        is_alloced_by_malloc::<VM>(object)
     }
 
     /// For malloc space, we just use the side metadata.
@@ -84,12 +84,12 @@ impl<VM: VMBinding> SFT for MallocSpace<VM> {
         debug_assert!(!addr.is_zero());
         // `addr` cannot be mapped by us. It should be mapped by the malloc library.
         debug_assert!(!addr.is_mapped());
-        has_object_alloced_by_malloc(addr)
+        has_object_alloced_by_malloc::<VM>(addr).is_some()
     }
 
     fn initialize_object_metadata(&self, object: ObjectReference, _alloc: bool) {
         trace!("initialize_object_metadata for object {}", object);
-        set_alloc_bit(object);
+        set_alloc_bit::<VM>(object);
     }
 
     #[inline(always)]
@@ -131,11 +131,11 @@ impl<VM: VMBinding> Space<VM> for MallocSpace<VM> {
     // We have assertions in a debug build. We allow this pattern for the release build.
     #[allow(clippy::let_and_return)]
     fn in_space(&self, object: ObjectReference) -> bool {
-        let ret = is_alloced_by_malloc(object);
+        let ret = is_alloced_by_malloc::<VM>(object);
 
         #[cfg(debug_assertions)]
         if ASSERT_ALLOCATION {
-            let addr = VM::VMObjectModel::object_start_ref(object);
+            let addr = VM::VMObjectModel::ref_to_object_start(object);
             let active_mem = self.active_mem.lock().unwrap();
             if ret {
                 // The alloc bit tells that the object is in space.
@@ -364,15 +364,15 @@ impl<VM: VMBinding> MallocSpace<VM> {
             return object;
         }
 
-        let address = object.to_address();
         assert!(
             self.in_space(object),
             "Cannot mark an object {} that was not alloced by malloc.",
-            address,
+            object,
         );
 
         if !is_marked::<VM>(object, Ordering::Relaxed) {
-            let chunk_start = conversions::chunk_align_down(address);
+            let chunk_start =
+                conversions::chunk_align_down(VM::VMObjectModel::ref_to_object_start(object));
             set_mark_bit::<VM>(object, Ordering::SeqCst);
             set_chunk_mark(chunk_start);
             queue.enqueue(object);
@@ -440,7 +440,7 @@ impl<VM: VMBinding> MallocSpace<VM> {
     /// Given an object in MallocSpace, return its malloc address, whether it is an offset malloc, and malloc size
     #[inline(always)]
     fn get_malloc_addr_size(object: ObjectReference) -> (Address, bool, usize) {
-        let obj_start = VM::VMObjectModel::object_start_ref(object);
+        let obj_start = VM::VMObjectModel::ref_to_object_start(object);
         let offset_malloc_bit = is_offset_malloc(obj_start);
         let bytes = get_malloc_usable_size(obj_start, offset_malloc_bit);
         (obj_start, offset_malloc_bit, bytes)
@@ -470,7 +470,7 @@ impl<VM: VMBinding> MallocSpace<VM> {
             // Free object
             self.free_internal(obj_start, bytes, offset_malloc);
             trace!("free object {}", object);
-            unsafe { unset_alloc_bit_unsafe(object) };
+            unsafe { unset_alloc_bit_unsafe::<VM>(object) };
 
             true
         } else {
@@ -479,7 +479,8 @@ impl<VM: VMBinding> MallocSpace<VM> {
         // Unset marks for free pages and update last_object_end
         if !empty_page_start.is_zero() {
             // unset marks for pages since last object
-            let current_page = object.to_address().align_down(BYTES_IN_MALLOC_PAGE);
+            let current_page =
+                VM::VMObjectModel::ref_to_object_start(object).align_down(BYTES_IN_MALLOC_PAGE);
             if current_page > *empty_page_start {
                 // we are the only GC thread that is accessing this chunk
                 unsafe {
src/policy/mallocspace/metadata.rs (18 additions & 10 deletions)
@@ -153,16 +153,21 @@ pub fn map_meta_space(metadata: &SideMetadataContext, addr: Address, size: usize
 }
 
 /// Check if a given object was allocated by malloc
-pub fn is_alloced_by_malloc(object: ObjectReference) -> bool {
-    has_object_alloced_by_malloc(object.to_address())
+pub fn is_alloced_by_malloc<VM: VMBinding>(object: ObjectReference) -> bool {
+    is_meta_space_mapped_for_address(object.to_address::<VM>())
+        && alloc_bit::is_alloced::<VM>(object)
 }
 
 /// Check if there is an object allocated by malloc at the address.
 ///
 /// This function doesn't check if `addr` is aligned.
 /// If not, it will try to load the alloc bit for the address rounded down to the metadata's granularity.
-pub fn has_object_alloced_by_malloc(addr: Address) -> bool {
-    is_meta_space_mapped_for_address(addr) && alloc_bit::is_alloced_object(addr)
+#[cfg(feature = "is_mmtk_object")]
+pub fn has_object_alloced_by_malloc<VM: VMBinding>(addr: Address) -> Option<ObjectReference> {
+    if !is_meta_space_mapped_for_address(addr) {
+        return None;
+    }
+    alloc_bit::is_alloced_object::<VM>(addr)
 }
 
 pub fn is_marked<VM: VMBinding>(object: ObjectReference, ordering: Ordering) -> bool {
@@ -213,17 +218,17 @@ pub unsafe fn is_chunk_marked_unsafe(chunk_start: Address) -> bool {
     ACTIVE_CHUNK_METADATA_SPEC.load::<u8>(chunk_start) == 1
 }
 
-pub fn set_alloc_bit(object: ObjectReference) {
-    alloc_bit::set_alloc_bit(object);
+pub fn set_alloc_bit<VM: VMBinding>(object: ObjectReference) {
+    alloc_bit::set_alloc_bit::<VM>(object);
 }
 
 pub fn set_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Ordering) {
     VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.store_atomic::<VM, u8>(object, 1, None, ordering);
 }
 
 #[allow(unused)]
-pub fn unset_alloc_bit(object: ObjectReference) {
-    alloc_bit::unset_alloc_bit(object);
+pub fn unset_alloc_bit<VM: VMBinding>(object: ObjectReference) {
+    alloc_bit::unset_alloc_bit::<VM>(object);
 }
 
 #[allow(unused)]
@@ -235,20 +240,23 @@ pub(super) fn set_chunk_mark(chunk_start: Address) {
     ACTIVE_CHUNK_METADATA_SPEC.store_atomic::<u8>(chunk_start, 1, Ordering::SeqCst);
 }
 
+/// Is this allocation an offset malloc? The argument address should be the allocation address (object start)
 pub(super) fn is_offset_malloc(address: Address) -> bool {
     unsafe { OFFSET_MALLOC_METADATA_SPEC.load::<u8>(address) == 1 }
 }
 
+/// Set the offset bit for the allocation. The argument address should be the allocation address (object start)
 pub(super) fn set_offset_malloc_bit(address: Address) {
     OFFSET_MALLOC_METADATA_SPEC.store_atomic::<u8>(address, 1, Ordering::SeqCst);
 }
 
+/// Unset the offset bit for the allocation. The argument address should be the allocation address (object start)
 pub(super) unsafe fn unset_offset_malloc_bit_unsafe(address: Address) {
     OFFSET_MALLOC_METADATA_SPEC.store::<u8>(address, 0);
 }
 
-pub unsafe fn unset_alloc_bit_unsafe(object: ObjectReference) {
-    alloc_bit::unset_alloc_bit_unsafe(object);
+pub unsafe fn unset_alloc_bit_unsafe<VM: VMBinding>(object: ObjectReference) {
+    alloc_bit::unset_alloc_bit_unsafe::<VM>(object);
 }
 
 #[allow(unused)]