Make align_of behave like min_align_of.
This removes a footgun, since it is a reasonable assumption to make that
pointers to `T` will be aligned to `align_of::<T>()`. This also matches
the behaviour of C/C++. `min_align_of` is now deprecated.

Closes #21611.
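
Below is a minimal, hedged sketch of the behaviour this relies on; the `Example` struct and the `main` function are illustrative only and not part of the change.

```rust
use std::mem;

// Hypothetical type, used only to illustrate the new guarantee.
#[repr(C)]
struct Example {
    a: u8,
    b: u64,
}

fn main() {
    // After this change, `align_of` returns the ABI-required minimum
    // alignment, so it agrees with the now-deprecated `min_align_of`
    // (calling the latter here only emits a deprecation warning).
    assert_eq!(mem::align_of::<Example>(), mem::min_align_of::<Example>());

    // The assumption the commit message calls reasonable: a pointer to a
    // value of type `T` is aligned to `align_of::<T>()`.
    let value = Example { a: 1, b: 2 };
    let ptr = &value as *const Example;
    assert_eq!(ptr as usize % mem::align_of::<Example>(), 0);
}
```

Under the old `pref_align_of`-based definition of `align_of`, the first assertion was not guaranteed to hold.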
huonw committed May 20, 2015
1 parent b301e02 commit c168494
Showing 8 changed files with 68 additions and 56 deletions.
6 changes: 3 additions & 3 deletions src/liballoc/arc.rs
@@ -77,7 +77,7 @@ use core::atomic
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::fmt;
use core::cmp::Ordering;
use core::mem::{min_align_of_val, size_of_val};
use core::mem::{align_of_val, size_of_val};
use core::intrinsics::drop_in_place;
use core::mem;
use core::nonzero::NonZero;
@@ -230,7 +230,7 @@ impl<T: ?Sized> Arc<T> {

if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
deallocate(ptr as *mut u8, size_of_val(&*ptr), min_align_of_val(&*ptr))
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
@@ -533,7 +533,7 @@ impl<T: ?Sized> Drop for Weak<T> {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8,
size_of_val(&*ptr),
min_align_of_val(&*ptr)) }
align_of_val(&*ptr)) }
}
}
}
14 changes: 7 additions & 7 deletions src/liballoc/rc.rs
@@ -160,7 +160,7 @@ use core::default::Default;
use core::fmt;
use core::hash::{Hasher, Hash};
use core::marker::{self, Sized};
use core::mem::{self, min_align_of, size_of, forget};
use core::mem::{self, align_of, size_of, forget};
use core::nonzero::NonZero;
use core::ops::{Deref, Drop};
use core::option::Option;
@@ -175,7 +175,7 @@ use core::intrinsics::drop_in_place;
#[cfg(not(stage0))]
use core::marker::Unsize;
#[cfg(not(stage0))]
use core::mem::{min_align_of_val, size_of_val};
use core::mem::{align_of_val, size_of_val};
#[cfg(not(stage0))]
use core::ops::CoerceUnsized;

@@ -369,7 +369,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>());
align_of::<RcBox<T>>());
forget(rc);
Ok(val)
}
@@ -502,7 +502,7 @@ impl<T> Drop for Rc<T> {

if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
align_of::<RcBox<T>>())
}
}
}
@@ -556,7 +556,7 @@ impl<T: ?Sized> Drop for Rc<T> {
if self.weak() == 0 {
deallocate(ptr as *mut u8,
size_of_val(&*ptr),
min_align_of_val(&*ptr))
align_of_val(&*ptr))
}
}
}
@@ -1011,7 +1011,7 @@ impl<T> Drop for Weak<T> {
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
align_of::<RcBox<T>>())
}
}
}
@@ -1057,7 +1057,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr),
min_align_of_val(&*ptr))
align_of_val(&*ptr))
}
}
}
12 changes: 6 additions & 6 deletions src/libarena/lib.rs
@@ -241,7 +241,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
unsafe {
let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
mem::min_align_of::<T>());
mem::align_of::<T>());
let ptr = ptr as *mut T;
ptr::write(&mut (*ptr), op());
return &mut *ptr;
@@ -297,7 +297,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
let tydesc = get_tydesc::<T>();
let (ty_ptr, ptr) =
self.alloc_noncopy_inner(mem::size_of::<T>(),
mem::min_align_of::<T>());
mem::align_of::<T>());
let ty_ptr = ty_ptr as *mut usize;
let ptr = ptr as *mut T;
// Write in our tydesc along with a bit indicating that it
@@ -390,7 +390,7 @@ struct TypedArenaChunk<T> {

fn calculate_size<T>(capacity: usize) -> usize {
let mut size = mem::size_of::<TypedArenaChunk<T>>();
size = round_up(size, mem::min_align_of::<T>());
size = round_up(size, mem::align_of::<T>());
let elem_size = mem::size_of::<T>();
let elems_size = elem_size.checked_mul(capacity).unwrap();
size = size.checked_add(elems_size).unwrap();
@@ -402,7 +402,7 @@ impl<T> TypedArenaChunk<T> {
unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
-> *mut TypedArenaChunk<T> {
let size = calculate_size::<T>(capacity);
let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
let chunk = allocate(size, mem::align_of::<TypedArenaChunk<T>>())
as *mut TypedArenaChunk<T>;
if chunk.is_null() { alloc::oom() }
(*chunk).next = next;
@@ -428,7 +428,7 @@ impl<T> TypedArenaChunk<T> {
let size = calculate_size::<T>(self.capacity);
let self_ptr: *mut TypedArenaChunk<T> = self;
deallocate(self_ptr as *mut u8, size,
mem::min_align_of::<TypedArenaChunk<T>>());
mem::align_of::<TypedArenaChunk<T>>());
if !next.is_null() {
let capacity = (*next).capacity;
(*next).destroy(capacity);
@@ -441,7 +441,7 @@ impl<T> TypedArenaChunk<T> {
let this: *const TypedArenaChunk<T> = self;
unsafe {
mem::transmute(round_up(this.offset(1) as usize,
mem::min_align_of::<T>()))
mem::align_of::<T>()))
}
}

10 changes: 5 additions & 5 deletions src/libcollections/btree/node.rs
@@ -164,12 +164,12 @@ fn test_offset_calculation() {
}

fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
let (edges_size, edges_align) = if is_leaf {
(0, 1)
} else {
((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::align_of::<Node<K, V>>())
};

calculate_allocation(
@@ -182,11 +182,11 @@ fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize,
fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let keys_size = capacity * mem::size_of::<K>();
let vals_size = capacity * mem::size_of::<V>();
let vals_align = mem::min_align_of::<V>();
let vals_align = mem::align_of::<V>();
let edges_align = if is_leaf {
1
} else {
mem::min_align_of::<Node<K, V>>()
mem::align_of::<Node<K, V>>()
};

calculate_offsets(
14 changes: 7 additions & 7 deletions src/libcollections/vec.rs
@@ -221,7 +221,7 @@ impl<T> Vec<T> {
} else {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
let ptr = unsafe { allocate(size, mem::align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
}
@@ -394,7 +394,7 @@ impl<T> Vec<T> {
let ptr = reallocate(*self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T;
mem::align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = Unique::new(ptr);
}
@@ -865,9 +865,9 @@ impl<T> Vec<T> {
// FIXME: Assert statically that the types `T` and `U` have the
// same minimal alignment in case they are not zero-sized.

// These asserts are necessary because the `min_align_of` of the
// These asserts are necessary because the `align_of` of the
// types are passed to the allocator by `Vec`.
assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
assert!(mem::align_of::<T>() == mem::align_of::<U>());

// This `as isize` cast is safe, because the size of the elements of the
// vector is not 0, and:
@@ -1268,9 +1268,9 @@ impl<T> Vec<T> {
#[inline(never)]
unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
if old_size == 0 {
allocate(size, mem::min_align_of::<T>()) as *mut T
allocate(size, mem::align_of::<T>()) as *mut T
} else {
reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
reallocate(ptr as *mut u8, old_size, size, mem::align_of::<T>()) as *mut T
}
}

@@ -1279,7 +1279,7 @@ unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
if mem::size_of::<T>() != 0 {
deallocate(ptr as *mut u8,
len * mem::size_of::<T>(),
mem::min_align_of::<T>())
mem::align_of::<T>())
}
}

8 changes: 4 additions & 4 deletions src/libcollections/vec_deque.rs
@@ -67,7 +67,7 @@ impl<T> Drop for VecDeque<T> {
if mem::size_of::<T>() != 0 {
heap::deallocate(*self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
mem::min_align_of::<T>())
mem::align_of::<T>())
}
}
}
@@ -172,7 +172,7 @@ impl<T> VecDeque<T> {

let ptr = unsafe {
if mem::size_of::<T>() != 0 {
let ptr = heap::allocate(size, mem::min_align_of::<T>()) as *mut T;;
let ptr = heap::allocate(size, mem::align_of::<T>()) as *mut T;;
if ptr.is_null() { ::alloc::oom() }
Unique::new(ptr)
} else {
@@ -344,7 +344,7 @@ impl<T> VecDeque<T> {
let ptr = heap::reallocate(*self.ptr as *mut u8,
old,
new,
mem::min_align_of::<T>()) as *mut T;
mem::align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = Unique::new(ptr);
}
@@ -464,7 +464,7 @@ impl<T> VecDeque<T> {
let ptr = heap::reallocate(*self.ptr as *mut u8,
old,
new_size,
mem::min_align_of::<T>()) as *mut T;
mem::align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = Unique::new(ptr);
}
38 changes: 25 additions & 13 deletions src/libcore/mem.rs
@@ -131,6 +131,7 @@ pub fn size_of_val<T>(_val: &T) -> usize {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[deprecated(reason = "use `align_of` instead", since = "1.1.0")]
pub fn min_align_of<T>() -> usize {
unsafe { intrinsics::min_align_of::<T>() }
}
@@ -147,6 +148,7 @@ pub fn min_align_of<T>() -> usize {
#[cfg(not(stage0))]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[deprecated(reason = "use `align_of_val` instead", since = "1.1.0")]
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
unsafe { intrinsics::min_align_of_val(val) }
}
@@ -163,44 +165,54 @@ pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
#[cfg(stage0)]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[deprecated(reason = "use `align_of_val` instead", since = "1.1.0")]
pub fn min_align_of_val<T>(_val: &T) -> usize {
min_align_of::<T>()
}

/// Returns the alignment in memory for a type.
/// Returns the ABI-required minimum alignment of a type
///
/// This function will return the alignment, in bytes, of a type in memory. If the alignment
/// returned is adhered to, then the type is guaranteed to function properly.
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of::<i32>());
/// assert_eq!(4, mem::min_align_of::<i32>());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn align_of<T>() -> usize {
// We use the preferred alignment as the default alignment for a type. This
// appears to be what clang migrated towards as well:
//
// http://lists.cs.uiuc.edu/pipermail/cfe-commits/Week-of-Mon-20110725/044411.html
unsafe { intrinsics::pref_align_of::<T>() }
unsafe { intrinsics::min_align_of::<T>() }
}

/// Returns the alignment of the type of the value that `_val` points to.
/// Returns the ABI-required minimum alignment of the type of the value that `val` points to
///
/// This is similar to `align_of`, but function will properly handle types such as trait objects
/// (in the future), returning the alignment for an arbitrary value at runtime.
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[cfg(not(stage0))]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
unsafe { intrinsics::min_align_of_val(val) }
}

/// Returns the ABI-required minimum alignment of the type of the value that `_val` points to
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of_val(&5i32));
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[cfg(stage0)]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn align_of_val<T>(_val: &T) -> usize {
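
For context on the `mem` documentation change above, here is a hedged sketch of the footgun the commit removes. The `Pair` struct is illustrative, and the claim about 32-bit x86 is an assumption about that target's System V ABI (where `u64` has a preferred alignment of 8 but an ABI field alignment of 4); it is not stated in the diff itself.

```rust
use std::mem;

// Illustrative struct: on 32-bit x86 System V targets (e.g.
// i686-unknown-linux-gnu), the field `b` is only guaranteed 4-byte
// alignment, because the ABI-required alignment of `u64` there is 4
// even though its preferred alignment is 8.
#[repr(C)]
struct Pair {
    a: u32,
    b: u64,
}

fn main() {
    let p = Pair { a: 0, b: 0 };
    let addr = &p.b as *const u64 as usize;

    // With `align_of` returning the ABI-required minimum alignment, this
    // assertion holds on every target. Under the old preferred-alignment
    // behaviour it could fail on the 32-bit targets described above,
    // since `align_of::<u64>()` reported 8 while `&p.b` was only
    // guaranteed to be 4-byte aligned.
    assert_eq!(addr % mem::align_of::<u64>(), 0);
}
```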
(diff for the remaining changed file not loaded)
