Add debug assertions to some unsafe functions #92686

Merged: 1 commit, Apr 3, 2022
8 changes: 4 additions & 4 deletions compiler/rustc_data_structures/src/map_in_place.rs
@@ -30,13 +30,13 @@ impl<T> MapInPlace<T> for Vec<T> {
while read_i < old_len {
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
let e = ptr::read(self.as_ptr().add(read_i));
let iter = f(e).into_iter();
read_i += 1;

for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
ptr::write(self.as_mut_ptr().add(write_i), e);
write_i += 1;
} else {
// If this is reached we ran out of space
@@ -76,13 +76,13 @@ impl<T, A: Array<Item = T>> MapInPlace<T> for SmallVec<A> {
while read_i < old_len {
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
let e = ptr::read(self.as_ptr().add(read_i));
let iter = f(e).into_iter();
read_i += 1;

for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
ptr::write(self.as_mut_ptr().add(write_i), e);
write_i += 1;
} else {
// If this is reached we ran out of space
4 changes: 2 additions & 2 deletions library/alloc/benches/vec.rs
@@ -627,10 +627,10 @@ fn bench_map_regular(b: &mut Bencher) {
fn bench_map_fast(b: &mut Bencher) {
let data = black_box([(0, 0); LEN]);
b.iter(|| {
let mut result = Vec::with_capacity(data.len());
let mut result: Vec<u32> = Vec::with_capacity(data.len());
for i in 0..data.len() {
unsafe {
*result.get_unchecked_mut(i) = data[i].0;
*result.as_mut_ptr().add(i) = data[i].0;
result.set_len(i);
}
}
101 changes: 48 additions & 53 deletions library/core/src/intrinsics.rs
@@ -1969,6 +1969,40 @@ extern "rust-intrinsic" {
// (`transmute` also falls into this category, but it cannot be wrapped due to the
// check that `T` and `U` have the same size.)

/// Checks that the preconditions of an unsafe function are followed, but only at runtime
/// and only if debug_assertions are enabled.
///
/// # Safety
///
/// Invoking this macro is only sound if the following code is already UB when the passed
/// expression evaluates to false.
///
/// This macro expands to a check at runtime if debug_assertions is set. It has no effect at
/// compile time, but the semantics of the contained `const_eval_select` must be the same at
/// runtime and at compile time. Thus if the expression evaluates to false, this macro produces
/// different behavior at compile time and at runtime, and invoking it is incorrect.
///
/// So in a sense it is UB if this macro is useful, but we expect callers of `unsafe fn` to make
/// the occasional mistake, and this check should help them figure things out.
#[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn
macro_rules! assert_unsafe_precondition {
($e:expr) => {
if cfg!(debug_assertions) {
// Use a closure so that we can capture arbitrary expressions from the invocation
let runtime = || {
if !$e {
// abort instead of panicking to reduce impact on code size
::core::intrinsics::abort();
}
};
const fn comptime() {}

::core::intrinsics::const_eval_select((), comptime, runtime);
}
};
}
pub(crate) use assert_unsafe_precondition;
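For illustration, a minimal sketch of the same debug-only precondition pattern written as ordinary user code (outside of core, where a plain cfg!(debug_assertions) check suffices for a non-const function and std::process::abort() stands in for the intrinsic abort); the helper name is hypothetical, not part of this PR:

/// Hypothetical helper, not part of core: mirrors the pattern of
/// assert_unsafe_precondition! for a non-const unsafe function.
///
/// # Safety
/// The caller must guarantee that `i < v.len()`.
unsafe fn get_u32_unchecked(v: &[u32], i: usize) -> u32 {
    if cfg!(debug_assertions) && i >= v.len() {
        // Abort instead of panicking to keep the impact on code size small.
        std::process::abort();
    }
    // SAFETY: upheld by the caller.
    unsafe { *v.get_unchecked(i) }
}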

/// Checks whether `ptr` is properly aligned with respect to
/// `align_of::<T>()`.
pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
@@ -1977,7 +2011,6 @@ pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {

/// Checks whether the regions of memory starting at `src` and `dst` of size
/// `count * size_of::<T>()` do *not* overlap.
#[cfg(debug_assertions)]
pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
let src_usize = src as usize;
let dst_usize = dst as usize;
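The rest of the body is collapsed in this view; as a rough sketch (assuming count * size_of::<T>() does not overflow), two equally sized regions do not overlap exactly when their start addresses are at least that many bytes apart:

// Sketch only; not the exact body from this diff.
fn is_nonoverlapping_sketch<T>(src: *const T, dst: *const T, count: usize) -> bool {
    let size = core::mem::size_of::<T>()
        .checked_mul(count)
        .expect("size overflows usize");
    let (src, dst) = (src as usize, dst as usize);
    let diff = if src > dst { src - dst } else { dst - src };
    // Disjoint iff the distance between the starts is at least `size` bytes.
    diff >= size
}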
@@ -2079,28 +2112,16 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: us
pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
}

#[cfg(debug_assertions)]
fn runtime_check<T>(src: *const T, dst: *mut T, count: usize) {
if !is_aligned_and_not_null(src)
|| !is_aligned_and_not_null(dst)
|| !is_nonoverlapping(src, dst, count)
{
// Not panicking to keep codegen impact smaller.
abort();
}
}
#[cfg(debug_assertions)]
const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
#[cfg(debug_assertions)]
// SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
// Therefore, compiletime_check and runtime_check are observably equivalent.
unsafe {
const_eval_select((src, dst, count), compiletime_check, runtime_check);
}

// SAFETY: the safety contract for `copy_nonoverlapping` must be
// upheld by the caller.
unsafe { copy_nonoverlapping(src, dst, count) }
unsafe {
assert_unsafe_precondition!(
is_aligned_and_not_null(src)
&& is_aligned_and_not_null(dst)
&& is_nonoverlapping(src, dst, count)
);
copy_nonoverlapping(src, dst, count)
}
}
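The practical effect: when core itself is compiled with debug assertions (for example via cargo's -Zbuild-std, or in rustc's own CI builds), a call that violates the no-overlap precondition now aborts instead of silently being undefined behavior. A deliberately bad call, shown for illustration only:

let mut buf = [1u8, 2, 3, 4];
let p = buf.as_mut_ptr();
// Overlapping source and destination: the precondition is violated.
// Under a debug-assertions build of core this aborts; in a normal
// release build it remains undefined behavior, exactly as before.
unsafe { core::ptr::copy_nonoverlapping(p, p.add(1), 3) };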

/// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
@@ -2173,24 +2194,11 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
fn copy<T>(src: *const T, dst: *mut T, count: usize);
}

#[cfg(debug_assertions)]
fn runtime_check<T>(src: *const T, dst: *mut T) {
if !is_aligned_and_not_null(src) || !is_aligned_and_not_null(dst) {
// Not panicking to keep codegen impact smaller.
abort();
}
}
#[cfg(debug_assertions)]
const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
#[cfg(debug_assertions)]
// SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
// Therefore, compiletime_check and runtime_check are observably equivalent.
// SAFETY: the safety contract for `copy` must be upheld by the caller.
unsafe {
const_eval_select((src, dst), compiletime_check, runtime_check);
assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
copy(src, dst, count)
}

// SAFETY: the safety contract for `copy` must be upheld by the caller.
unsafe { copy(src, dst, count) }
}
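By contrast, copy has memmove semantics, so only alignment and non-null are asserted here; overlapping ranges are allowed. A small sound example:

let mut buf = [1u8, 2, 3, 4, 0];
let p = buf.as_mut_ptr();
// SAFETY: both pointers are non-null, aligned, and in bounds for 4 bytes;
// `copy` explicitly permits the two ranges to overlap.
unsafe { core::ptr::copy(p, p.add(1), 4) };
assert_eq!(buf, [1, 1, 2, 3, 4]);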

/// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
@@ -2274,24 +2282,11 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
}

#[cfg(debug_assertions)]
fn runtime_check<T>(ptr: *mut T) {
debug_assert!(
is_aligned_and_not_null(ptr),
"attempt to write to unaligned or null pointer"
);
}
#[cfg(debug_assertions)]
const fn compiletime_check<T>(_ptr: *mut T) {}
#[cfg(debug_assertions)]
// SAFETY: runtime debug-assertions are a best-effort basis; it's fine to
// not do them during compile time
// SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
unsafe {
const_eval_select((dst,), compiletime_check, runtime_check);
assert_unsafe_precondition!(is_aligned_and_not_null(dst));
write_bytes(dst, val, count)
}

// SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
unsafe { write_bytes(dst, val, count) }
}
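A sound use of write_bytes that satisfies the aligned-and-not-null precondition asserted above:

let mut buf = [0u8; 4];
// SAFETY: the pointer is non-null, aligned, and valid for 4 bytes of writes.
unsafe { core::ptr::write_bytes(buf.as_mut_ptr(), 0xAB, 4) };
assert_eq!(buf, [0xAB; 4]);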

/// Selects which function to call depending on the context.
6 changes: 5 additions & 1 deletion library/core/src/num/nonzero.rs
@@ -52,9 +52,13 @@ macro_rules! nonzero_integers {
#[$const_new_unchecked_stability]
#[must_use]
#[inline]
#[rustc_allow_const_fn_unstable(const_fn_fn_ptr_basics)] // required by assert_unsafe_precondition
pub const unsafe fn new_unchecked(n: $Int) -> Self {
// SAFETY: this is guaranteed to be safe by the caller.
unsafe { Self(n) }
unsafe {
core::intrinsics::assert_unsafe_precondition!(n != 0);
Self(n)
}
}
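What the new n != 0 assertion buys: a sound call is unchanged, while passing zero (always undefined behavior) now aborts when core is built with debug assertions. A brief illustration:

use core::num::NonZeroU32;

// Sound: the caller upholds `n != 0`.
let three = unsafe { NonZeroU32::new_unchecked(3) };
assert_eq!(three.get(), 3);

// `NonZeroU32::new_unchecked(0)` is still UB in a normal build, but now
// aborts under a debug-assertions build of core.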

/// Creates a non-zero if the given value is not zero.
30 changes: 20 additions & 10 deletions library/core/src/ptr/mod.rs
@@ -75,7 +75,10 @@
use crate::cmp::Ordering;
use crate::fmt;
use crate::hash;
use crate::intrinsics::{self, abort, is_aligned_and_not_null};
use crate::intrinsics::{
self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
};

use crate::mem::{self, MaybeUninit};

#[stable(feature = "rust1", since = "1.0.0")]
@@ -438,6 +441,16 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
};
}

// SAFETY: the caller must guarantee that `x` and `y` are
// valid for writes and properly aligned.
unsafe {
assert_unsafe_precondition!(
is_aligned_and_not_null(x)
&& is_aligned_and_not_null(y)
&& is_nonoverlapping(x, y, count)
);
}

// NOTE(scottmcm) MIRI is disabled here as reading in smaller units is a
// pessimization for it. Also, if the type contains any unaligned pointers,
// copying those over multiple reads is difficult to support.
@@ -528,6 +541,7 @@ pub const unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
// and cannot overlap `src` since `dst` must point to a distinct
// allocated object.
unsafe {
assert_unsafe_precondition!(is_aligned_and_not_null(dst));
mem::swap(&mut *dst, &mut src); // cannot overlap
}
src
@@ -1007,12 +1021,11 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
#[inline]
#[stable(feature = "volatile", since = "1.9.0")]
pub unsafe fn read_volatile<T>(src: *const T) -> T {
if cfg!(debug_assertions) && !is_aligned_and_not_null(src) {
// Not panicking to keep codegen impact smaller.
abort();
}
// SAFETY: the caller must uphold the safety contract for `volatile_load`.
unsafe { intrinsics::volatile_load(src) }
unsafe {
assert_unsafe_precondition!(is_aligned_and_not_null(src));
intrinsics::volatile_load(src)
}
}

/// Performs a volatile write of a memory location with the given value without
@@ -1078,12 +1091,9 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T {
#[inline]
#[stable(feature = "volatile", since = "1.9.0")]
pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) {
// Not panicking to keep codegen impact smaller.
abort();
}
// SAFETY: the caller must uphold the safety contract for `volatile_store`.
unsafe {
assert_unsafe_precondition!(is_aligned_and_not_null(dst));
intrinsics::volatile_store(dst, src);
}
}
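Both volatile accessors now route their alignment and null checks through the macro; a sound pair of calls for reference:

let mut x: u32 = 0;
let p = &mut x as *mut u32;
// SAFETY: `p` is non-null, properly aligned, and valid for reads and writes.
unsafe {
    core::ptr::write_volatile(p, 42);
    assert_eq!(core::ptr::read_volatile(p), 42);
}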
18 changes: 15 additions & 3 deletions library/core/src/slice/index.rs
@@ -1,5 +1,6 @@
//! Indexing implementations for `[T]`.

use crate::intrinsics::assert_unsafe_precondition;
use crate::intrinsics::const_eval_select;
use crate::ops;
use crate::ptr;
@@ -219,13 +220,19 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
// cannot be longer than `isize::MAX`. They also guarantee that
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
// so the call to `add` is safe.
unsafe { slice.as_ptr().add(self) }
unsafe {
assert_unsafe_precondition!(self < slice.len());
slice.as_ptr().add(self)
}
}

#[inline]
unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
// SAFETY: see comments for `get_unchecked` above.
unsafe { slice.as_mut_ptr().add(self) }
unsafe {
assert_unsafe_precondition!(self < slice.len());
slice.as_mut_ptr().add(self)
}
}

#[inline]
@@ -272,13 +279,18 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
// cannot be longer than `isize::MAX`. They also guarantee that
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
// so the call to `add` is safe.
unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) }

unsafe {
assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
}
}

#[inline]
unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
// SAFETY: see comments for `get_unchecked` above.
unsafe {
assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
}
}
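These are the checks behind slice::get_unchecked and get_unchecked_mut; with a debug-assertions build of core, an out-of-bounds index or range now aborts instead of reading past the slice. A quick sound illustration:

let v = [10u32, 20, 30, 40];
// SAFETY: the index and the range are within bounds, so the asserted
// preconditions (index < len, start <= end <= len) hold.
let second = unsafe { *v.get_unchecked(1) };
let middle = unsafe { v.get_unchecked(1..3) };
assert_eq!(second, 20);
assert_eq!(middle, &[20, 30]);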
33 changes: 16 additions & 17 deletions library/core/src/slice/mod.rs
@@ -7,6 +7,7 @@
#![stable(feature = "rust1", since = "1.0.0")]

use crate::cmp::Ordering::{self, Greater, Less};
use crate::intrinsics::{assert_unsafe_precondition, exact_div};
use crate::marker::Copy;
use crate::mem;
use crate::num::NonZeroUsize;
@@ -637,15 +638,10 @@ impl<T> [T] {
#[unstable(feature = "slice_swap_unchecked", issue = "88539")]
#[rustc_const_unstable(feature = "const_swap", issue = "83163")]
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
#[cfg(debug_assertions)]
{
let _ = &self[a];
let _ = &self[b];
}

let ptr = self.as_mut_ptr();
// SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
unsafe {
assert_unsafe_precondition!(a < self.len() && b < self.len());
ptr::swap(ptr.add(a), ptr.add(b));
}
}
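A sketch of calling swap_unchecked (unstable, behind the slice_swap_unchecked feature named above) in a way that satisfies the asserted bounds:

// Requires a nightly toolchain with #![feature(slice_swap_unchecked)].
let mut v = [1, 2, 3];
// SAFETY: 0 < v.len() and 2 < v.len().
unsafe { v.swap_unchecked(0, 2) };
assert_eq!(v, [3, 2, 1]);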
@@ -950,11 +946,11 @@ impl<T> [T] {
#[unstable(feature = "slice_as_chunks", issue = "74985")]
#[inline]
pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
debug_assert_ne!(N, 0);
debug_assert_eq!(self.len() % N, 0);
let new_len =
// SAFETY: Our precondition is exactly what's needed to call this
unsafe { crate::intrinsics::exact_div(self.len(), N) };
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
let new_len = unsafe {
assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
exact_div(self.len(), N)
};
// SAFETY: We cast a slice of `new_len * N` elements into
// a slice of `new_len` many `N` elements chunks.
unsafe { from_raw_parts(self.as_ptr().cast(), new_len) }
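For as_chunks_unchecked (unstable, behind the slice_as_chunks feature), the asserted precondition is that N is non-zero and divides the length exactly; a sketch:

// Requires a nightly toolchain with #![feature(slice_as_chunks)].
let v = [1u8, 2, 3, 4, 5, 6];
// SAFETY: N == 3 is non-zero and 6 % 3 == 0.
let chunks: &[[u8; 3]] = unsafe { v.as_chunks_unchecked() };
assert_eq!(chunks[0], [1, 2, 3]);
assert_eq!(chunks[1], [4, 5, 6]);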
@@ -1086,11 +1082,11 @@ impl<T> [T] {
#[unstable(feature = "slice_as_chunks", issue = "74985")]
#[inline]
pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
debug_assert_ne!(N, 0);
debug_assert_eq!(self.len() % N, 0);
let new_len =
// SAFETY: Our precondition is exactly what's needed to call this
unsafe { crate::intrinsics::exact_div(self.len(), N) };
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
let new_len = unsafe {
assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
exact_div(self.len(), N)
};
// SAFETY: We cast a slice of `new_len * N` elements into
// a slice of `new_len` many `N` elements chunks.
unsafe { from_raw_parts_mut(self.as_mut_ptr().cast(), new_len) }
@@ -1646,7 +1642,10 @@ impl<T> [T] {
//
// `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
// is fine.
unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) }
unsafe {
assert_unsafe_precondition!(mid <= len);
(from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
}
}
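split_at_mut_unchecked (unstable at the time of this PR, presumably behind the slice_split_at_unchecked feature) now asserts mid <= len; a sketch of a sound call:

// Requires a nightly toolchain with the corresponding unstable feature enabled.
let mut v = [1, 2, 3, 4];
// SAFETY: 2 <= v.len().
let (left, right) = unsafe { v.split_at_mut_unchecked(2) };
left[0] = 9;
right[1] = 9;
assert_eq!(v, [9, 2, 3, 9]);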

/// Divides one slice into an array and a remainder slice at an index.