Skip to content

Commit

Permalink
Remove From<Address> and Into<Address> for Region (#695)
Browse files Browse the repository at this point in the history
This PR addresses the issues we discussed in #643 (comment). Basically, Rust expects `From<T>` to always succeed, and Rust suggests using `TryFrom<T>` if the conversion may fail. For us, turning an address into a region may fail, so we should not use `From<T>`. However, we also do not need the explicit error handling that `TryFrom<T>` entails. Thus we instead provide two methods: `Region::from_unaligned_address()` and `Region::from_aligned_address()`.
  • Loading branch information
qinsoon authored Nov 11, 2022
1 parent 0614487 commit c453a1c
Show file tree
Hide file tree
Showing 5 changed files with 61 additions and 69 deletions.
30 changes: 13 additions & 17 deletions src/policy/immix/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -64,32 +64,28 @@ impl BlockState {
}

/// Data structure to reference an immix block.
#[repr(C)]
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialOrd, PartialEq)]
pub struct Block(Address);

impl From<Address> for Block {
impl Region for Block {
#[cfg(not(feature = "immix_smaller_block"))]
const LOG_BYTES: usize = 15;
#[cfg(feature = "immix_smaller_block")]
const LOG_BYTES: usize = 13;

#[inline(always)]
fn from(address: Address) -> Block {
fn from_aligned_address(address: Address) -> Self {
debug_assert!(address.is_aligned_to(Self::BYTES));
Self(address)
}
}

impl From<Block> for Address {
#[inline(always)]
fn from(block: Block) -> Address {
block.0
fn start(&self) -> Address {
self.0
}
}

impl Region for Block {
#[cfg(not(feature = "immix_smaller_block"))]
const LOG_BYTES: usize = 15;
#[cfg(feature = "immix_smaller_block")]
const LOG_BYTES: usize = 13;
}

impl Block {
/// Log pages in block
pub const LOG_PAGES: usize = Self::LOG_BYTES - LOG_BYTES_IN_PAGE as usize;
Expand All @@ -111,7 +107,7 @@ impl Block {
/// Get the chunk containing the block.
#[inline(always)]
pub fn chunk(&self) -> Chunk {
Chunk::from(Chunk::align(self.0))
Chunk::from_unaligned_address(self.0)
}

/// Get the address range of the block's line mark table.
Expand Down Expand Up @@ -190,12 +186,12 @@ impl Block {

#[inline(always)]
pub fn start_line(&self) -> Line {
Line::from(self.start())
Line::from_aligned_address(self.start())
}

#[inline(always)]
pub fn end_line(&self) -> Line {
Line::from(self.end())
Line::from_aligned_address(self.end())
}

/// Get the range of lines within the block.
Expand Down
22 changes: 9 additions & 13 deletions src/policy/immix/chunk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,29 +13,25 @@ use spin::Mutex;
use std::{ops::Range, sync::atomic::Ordering};

/// Data structure to reference a MMTk 4 MB chunk.
#[repr(C)]
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Eq)]
pub struct Chunk(Address);

impl From<Address> for Chunk {
impl Region for Chunk {
const LOG_BYTES: usize = LOG_BYTES_IN_CHUNK;

#[inline(always)]
fn from(address: Address) -> Chunk {
fn from_aligned_address(address: Address) -> Self {
debug_assert!(address.is_aligned_to(Self::BYTES));
Self(address)
}
}

impl From<Chunk> for Address {
#[inline(always)]
fn from(chunk: Chunk) -> Address {
chunk.0
fn start(&self) -> Address {
self.0
}
}

impl Region for Chunk {
const LOG_BYTES: usize = LOG_BYTES_IN_CHUNK;
}

impl Chunk {
/// Chunk constant with zero address
const ZERO: Self = Self(Address::ZERO);
Expand All @@ -47,8 +43,8 @@ impl Chunk {
/// Get a range of blocks within this chunk.
#[inline(always)]
pub fn blocks(&self) -> RegionIterator<Block> {
let start = Block::from(Block::align(self.0));
let end = Block::from(start.start() + (Self::BLOCKS << Block::LOG_BYTES));
let start = Block::from_unaligned_address(self.0);
let end = Block::from_aligned_address(start.start() + (Self::BLOCKS << Block::LOG_BYTES));
RegionIterator::<Block>::new(start, end)
}

Expand Down
2 changes: 1 addition & 1 deletion src/policy/immix/immixspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -344,7 +344,7 @@ impl<VM: VMBinding> ImmixSpace<VM> {
return None;
}
self.defrag.notify_new_clean_block(copy);
let block = Block::from(block_address);
let block = Block::from_aligned_address(block_address);
block.init(copy);
self.chunk_map.set(block.chunk(), ChunkState::Allocated);
Some(block)
Expand Down
26 changes: 11 additions & 15 deletions src/policy/immix/line.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,31 +7,27 @@ use crate::{
};

/// Data structure to reference a line within an immix block.
#[repr(C)]
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Eq)]
pub struct Line(Address);

impl From<Address> for Line {
#[allow(clippy::assertions_on_constants)]
impl Region for Line {
const LOG_BYTES: usize = 8;

#[inline(always)]
fn from(address: Address) -> Line {
#[allow(clippy::assertions_on_constants)] // make sure line is not used when BLOCK_ONLY is turned on.
fn from_aligned_address(address: Address) -> Self {
debug_assert!(!super::BLOCK_ONLY);
debug_assert!(address.is_aligned_to(Self::BYTES));
Self(address)
}
}

impl From<Line> for Address {
#[inline(always)]
fn from(line: Line) -> Address {
line.0
fn start(&self) -> Address {
self.0
}
}

impl Region for Line {
const LOG_BYTES: usize = 8;
}

#[allow(clippy::assertions_on_constants)]
impl Line {
pub const RESET_MARK_STATE: u8 = 1;
Expand All @@ -45,7 +41,7 @@ impl Line {
#[inline(always)]
pub fn block(&self) -> Block {
debug_assert!(!super::BLOCK_ONLY);
Block::from(Block::align(self.0))
Block::from_unaligned_address(self.0)
}

/// Get line index within its containing block.
Expand Down Expand Up @@ -77,8 +73,8 @@ impl Line {
debug_assert!(!super::BLOCK_ONLY);
let start = VM::VMObjectModel::object_start_ref(object);
let end = start + VM::VMObjectModel::get_current_size(object);
let start_line = Line::from(Line::align(start));
let mut end_line = Line::from(Line::align(end));
let start_line = Line::from_unaligned_address(start);
let mut end_line = Line::from_unaligned_address(end);
if !Line::is_aligned(end) {
end_line = end_line.next();
}
Expand Down
50 changes: 27 additions & 23 deletions src/util/linear_scan.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,22 @@ impl<VM: VMBinding> LinearScanObjectSize for DefaultObjectSize<VM> {

/// Region represents a memory region with a properly aligned address as its start and a fixed size for the region.
/// Region provides a set of utility methods, along with a RegionIterator that linearly scans at the step of a region.
pub trait Region: Copy + PartialEq + PartialOrd + From<Address> + Into<Address> {
pub trait Region: Copy + PartialEq + PartialOrd {
const LOG_BYTES: usize;
const BYTES: usize = 1 << Self::LOG_BYTES;

/// Create a region from an address that is aligned to the region boundary. The method should panic if the address
/// is not properly aligned to the region. For performance, this method should always be inlined.
fn from_aligned_address(address: Address) -> Self;
/// Return the start address of the region. For performance, this method should always be inlined.
fn start(&self) -> Address;

/// Create a region from an arbitrary address.
#[inline(always)]
fn from_unaligned_address(address: Address) -> Self {
Self::from_aligned_address(Self::align(address))
}

/// Align the address to the region.
#[inline(always)]
fn align(address: Address) -> Address {
Expand All @@ -90,11 +102,7 @@ pub trait Region: Copy + PartialEq + PartialOrd + From<Address> + Into<Address>
fn is_aligned(address: Address) -> bool {
address.is_aligned_to(Self::BYTES)
}
/// Return the start address of the region.
#[inline(always)]
fn start(&self) -> Address {
(*self).into()
}

/// Return the end address of the region. Note that the end address is not in the region.
#[inline(always)]
fn end(&self) -> Address {
Expand All @@ -109,12 +117,12 @@ pub trait Region: Copy + PartialEq + PartialOrd + From<Address> + Into<Address>
#[inline(always)]
fn next_nth(&self, n: usize) -> Self {
debug_assert!(self.start().as_usize() < usize::MAX - (n << Self::LOG_BYTES));
Self::from(self.start() + (n << Self::LOG_BYTES))
Self::from_aligned_address(self.start() + (n << Self::LOG_BYTES))
}
/// Return the region that contains the object (by its cell address).
#[inline(always)]
fn containing<VM: VMBinding>(object: ObjectReference) -> Self {
Self::from(VM::VMObjectModel::ref_to_address(object).align_down(Self::BYTES))
Self::from_unaligned_address(VM::VMObjectModel::ref_to_address(object))
}
}

Expand Down Expand Up @@ -156,25 +164,21 @@ mod tests {
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
struct Page(Address);

impl From<Address> for Page {
impl Region for Page {
const LOG_BYTES: usize = LOG_BYTES_IN_PAGE as usize;

#[inline(always)]
fn from(address: Address) -> Page {
fn from_aligned_address(address: Address) -> Self {
debug_assert!(address.is_aligned_to(Self::BYTES));
Self(address)
}
}

impl From<Page> for Address {
#[inline(always)]
fn from(page: Page) -> Address {
page.0
fn start(&self) -> Address {
self.0
}
}

impl Region for Page {
const LOG_BYTES: usize = LOG_BYTES_IN_PAGE as usize;
}

#[test]
fn test_region_methods() {
let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
Expand All @@ -186,7 +190,7 @@ mod tests {
debug_assert!(Page::is_aligned(addr4k));
debug_assert!(!Page::is_aligned(addr4k1));

let page = Page::from(addr4k);
let page = Page::from_aligned_address(addr4k);
// start/end
debug_assert_eq!(page.start(), addr4k);
debug_assert_eq!(page.end(), addr4k + PAGE_SIZE);
Expand All @@ -199,7 +203,7 @@ mod tests {
#[test]
fn test_region_iterator_normal() {
let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
let page = Page::from(addr4k);
let page = Page::from_aligned_address(addr4k);
let end_page = page.next_nth(5);

let mut results = vec![];
Expand All @@ -222,7 +226,7 @@ mod tests {
#[test]
fn test_region_iterator_same_start_end() {
let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
let page = Page::from(addr4k);
let page = Page::from_aligned_address(addr4k);

let mut results = vec![];
let iter = RegionIterator::new(page, page);
Expand All @@ -235,8 +239,8 @@ mod tests {
#[test]
fn test_region_iterator_smaller_end() {
let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
let page = Page::from(addr4k);
let end = Page::from(Address::ZERO);
let page = Page::from_aligned_address(addr4k);
let end = Page::from_aligned_address(Address::ZERO);

let mut results = vec![];
let iter = RegionIterator::new(page, end);
Expand Down

0 comments on commit c453a1c

Please sign in to comment.