Auto merge of #92686 - saethlin:unsafe-debug-asserts, r=Amanieu
Add debug assertions to some unsafe functions

As suggested by https://github.com/rust-lang/rust/issues/51713

~~Some similar code calls `abort()` instead of `panic!()`, but aborting doesn't work in a `const fn`, and the intrinsic for doing dispatch based on whether execution is in a const is unstable.~~

This picked up some invalid uses of `get_unchecked` in the compiler, and fixes them. I can confirm that these assertions do in fact pick up invalid uses of `get_unchecked` in the wild, though the user experience is less than awesome:

```
     Running unittests (target/x86_64-unknown-linux-gnu/debug/deps/rle_decode_fast-04b7918da2001b50)

running 6 tests
error: test failed, to rerun pass '--lib'

Caused by:
  process didn't exit successfully: `/home/ben/rle-decode-helper/target/x86_64-unknown-linux-gnu/debug/deps/rle_decode_fast-04b7918da2001b50` (signal: 4, SIGILL: illegal instruction)
```

~~As best I can tell these changes produce a 6% regression in the runtime of `./x.py test` when `[rust] debug = true` is set.~~

The latest commit (https://github.com/rust-lang/rust/pull/92686/commits/6894d559bdb4365243b3f4bf73f18e4b1bed04d1) brings the additional overhead from this PR down to 0.5%, while also adding a few more assertions. I think this now covers all the places in `core` where it is reasonable to check safety requirements at runtime. Thoughts?
commit 168a020900

8 changed files with 125 additions and 130 deletions
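As a minimal illustration of the class of bug these assertions catch (an editor's sketch, not code from the PR): with a standard library built with debug assertions (e.g. `[rust] debug = true`), the out-of-bounds `get_unchecked` below now trips the new precondition check and aborts with an illegal-instruction trap, matching the SIGILL shown above.

```rust
fn main() {
    let v = vec![1u32, 2, 3];
    // UB: index 3 is one past the end. Previously this compiled to a
    // silent out-of-bounds read; with these checks it aborts instead.
    let x = unsafe { *v.get_unchecked(3) };
    println!("{x}");
}
```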
```diff
@@ -30,13 +30,13 @@ impl<T> MapInPlace<T> for Vec<T> {
             while read_i < old_len {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
-                let e = ptr::read(self.get_unchecked(read_i));
+                let e = ptr::read(self.as_ptr().add(read_i));
                 let iter = f(e).into_iter();
                 read_i += 1;
 
                 for e in iter {
                     if write_i < read_i {
-                        ptr::write(self.get_unchecked_mut(write_i), e);
+                        ptr::write(self.as_mut_ptr().add(write_i), e);
                         write_i += 1;
                     } else {
                         // If this is reached we ran out of space
```
```diff
@@ -76,13 +76,13 @@ impl<T, A: Array<Item = T>> MapInPlace<T> for SmallVec<A> {
             while read_i < old_len {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
-                let e = ptr::read(self.get_unchecked(read_i));
+                let e = ptr::read(self.as_ptr().add(read_i));
                 let iter = f(e).into_iter();
                 read_i += 1;
 
                 for e in iter {
                     if write_i < read_i {
-                        ptr::write(self.get_unchecked_mut(write_i), e);
+                        ptr::write(self.as_mut_ptr().add(write_i), e);
                         write_i += 1;
                     } else {
                         // If this is reached we ran out of space
```
```diff
@@ -627,10 +627,10 @@ fn bench_map_regular(b: &mut Bencher) {
 fn bench_map_fast(b: &mut Bencher) {
     let data = black_box([(0, 0); LEN]);
     b.iter(|| {
-        let mut result = Vec::with_capacity(data.len());
+        let mut result: Vec<u32> = Vec::with_capacity(data.len());
        for i in 0..data.len() {
             unsafe {
-                *result.get_unchecked_mut(i) = data[i].0;
+                *result.as_mut_ptr().add(i) = data[i].0;
                 result.set_len(i);
             }
         }
```
```diff
@@ -1969,6 +1969,40 @@ extern "rust-intrinsic" {
 // (`transmute` also falls into this category, but it cannot be wrapped due to the
 // check that `T` and `U` have the same size.)
 
+/// Check that the preconditions of an unsafe function are followed, if debug_assertions are on,
+/// and only at runtime.
+///
+/// # Safety
+///
+/// Invoking this macro is only sound if the following code is already UB when the passed
+/// expression evaluates to false.
+///
+/// This macro expands to a check at runtime if debug_assertions is set. It has no effect at
+/// compile time, but the semantics of the contained `const_eval_select` must be the same at
+/// runtime and at compile time. Thus if the expression evaluates to false, this macro produces
+/// different behavior at compile time and at runtime, and invoking it is incorrect.
+///
+/// So in a sense it is UB if this macro is useful, but we expect callers of `unsafe fn` to make
+/// the occasional mistake, and this check should help them figure things out.
+#[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn
+macro_rules! assert_unsafe_precondition {
+    ($e:expr) => {
+        if cfg!(debug_assertions) {
+            // Use a closure so that we can capture arbitrary expressions from the invocation
+            let runtime = || {
+                if !$e {
+                    // abort instead of panicking to reduce impact on code size
+                    ::core::intrinsics::abort();
+                }
+            };
+            const fn comptime() {}
+
+            ::core::intrinsics::const_eval_select((), comptime, runtime);
+        }
+    };
+}
+pub(crate) use assert_unsafe_precondition;
+
 /// Checks whether `ptr` is properly aligned with respect to
 /// `align_of::<T>()`.
 pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
```
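The `const_eval_select` arm is what keeps the macro callable from stably-const functions: at compile time it dispatches to a no-op, at runtime to the closure that aborts. Outside `core`, a rough stable-Rust sketch of the same idea (a hypothetical `debug_check_precondition!` helper, without the const-eval dispatch) might look like:

```rust
// Hypothetical user-space analogue of `assert_unsafe_precondition!`.
// Unlike the macro above, it cannot be used in `const fn`, because it
// has no compile-time no-op arm.
macro_rules! debug_check_precondition {
    ($e:expr) => {
        if cfg!(debug_assertions) && !$e {
            // Abort instead of panicking to keep the codegen impact small.
            std::process::abort();
        }
    };
}

/// # Safety
/// `i` must be in bounds of `v`.
unsafe fn get_at(v: &[u32], i: usize) -> u32 {
    debug_check_precondition!(i < v.len());
    unsafe { *v.get_unchecked(i) }
}

fn main() {
    let v = [10, 20, 30];
    assert_eq!(unsafe { get_at(&v, 1) }, 20);
}
```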
```diff
@@ -1977,7 +2011,6 @@ pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
 
 /// Checks whether the regions of memory starting at `src` and `dst` of size
 /// `count * size_of::<T>()` do *not* overlap.
-#[cfg(debug_assertions)]
 pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
     let src_usize = src.addr();
     let dst_usize = dst.addr();
```
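The hunk only shows the first two lines of `is_nonoverlapping`; for reference, a self-contained sketch of the check it performs (using `as usize` casts where the in-tree helper uses the unstable `addr()` method, and with the tail of the function inferred, so treat it as an approximation):

```rust
// Sketch: two regions of `count` `T`s are disjoint iff the distance
// between their start addresses is at least the size of one region.
fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
    let src_usize = src as usize;
    let dst_usize = dst as usize;
    let size = core::mem::size_of::<T>()
        .checked_mul(count)
        .expect("size_of::<T>() * count overflows a usize");
    src_usize.abs_diff(dst_usize) >= size
}

fn main() {
    let buf = [0u8; 8];
    let p = buf.as_ptr();
    assert!(is_nonoverlapping(p, unsafe { p.add(4) }, 4)); // [0,4) vs [4,8)
    assert!(!is_nonoverlapping(p, unsafe { p.add(2) }, 4)); // [0,4) vs [2,6)
}
```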
```diff
@@ -2079,28 +2112,16 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
         pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
     }
 
-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(src: *const T, dst: *mut T, count: usize) {
-        if !is_aligned_and_not_null(src)
-            || !is_aligned_and_not_null(dst)
-            || !is_nonoverlapping(src, dst, count)
-        {
-            // Not panicking to keep codegen impact smaller.
-            abort();
-        }
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
-    // Therefore, compiletime_check and runtime_check are observably equivalent.
-    unsafe {
-        const_eval_select((src, dst, count), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `copy_nonoverlapping` must be
     // upheld by the caller.
-    unsafe { copy_nonoverlapping(src, dst, count) }
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(src)
+                && is_aligned_and_not_null(dst)
+                && is_nonoverlapping(src, dst, count)
+        );
+        copy_nonoverlapping(src, dst, count)
+    }
 }
 
 /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
```
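For a caller that satisfies all three asserted conditions (aligned, non-null, disjoint), nothing observable changes:

```rust
use core::ptr;

fn main() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 4];
    // Aligned, non-null, and non-overlapping, so the new check passes.
    unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len()) };
    assert_eq!(dst, src);
}
```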
```diff
@@ -2173,24 +2194,11 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
         fn copy<T>(src: *const T, dst: *mut T, count: usize);
     }
 
-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(src: *const T, dst: *mut T) {
-        if !is_aligned_and_not_null(src) || !is_aligned_and_not_null(dst) {
-            // Not panicking to keep codegen impact smaller.
-            abort();
-        }
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
-    // Therefore, compiletime_check and runtime_check are observably equivalent.
-    unsafe {
-        const_eval_select((src, dst), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `copy` must be upheld by the caller.
-    unsafe { copy(src, dst, count) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
+        copy(src, dst, count)
+    }
 }
 
 /// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
```
```diff
@@ -2274,24 +2282,11 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
         fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     }
 
-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(ptr: *mut T) {
-        debug_assert!(
-            is_aligned_and_not_null(ptr),
-            "attempt to write to unaligned or null pointer"
-        );
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_ptr: *mut T) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: runtime debug-assertions are a best-effort basis; it's fine to
-    // not do them during compile time
-    unsafe {
-        const_eval_select((dst,), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
-    unsafe { write_bytes(dst, val, count) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
+        write_bytes(dst, val, count)
+    }
 }
 
 /// Selects which function to call depending on the context.
```
```diff
@@ -52,9 +52,13 @@ macro_rules! nonzero_integers {
                 #[$const_new_unchecked_stability]
                 #[must_use]
                 #[inline]
+                #[rustc_allow_const_fn_unstable(const_fn_fn_ptr_basics)] // required by assert_unsafe_precondition
                 pub const unsafe fn new_unchecked(n: $Int) -> Self {
                     // SAFETY: this is guaranteed to be safe by the caller.
-                    unsafe { Self(n) }
+                    unsafe {
+                        core::intrinsics::assert_unsafe_precondition!(n != 0);
+                        Self(n)
+                    }
                 }
 
                 /// Creates a non-zero if the given value is not zero.
```
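A quick illustration of what the `new_unchecked` check buys (again assuming a `core` built with debug assertions):

```rust
use std::num::NonZeroU32;

fn main() {
    // Fine: the asserted precondition `n != 0` holds.
    let five = unsafe { NonZeroU32::new_unchecked(5) };
    assert_eq!(five.get(), 5);

    // UB: violates the safety contract. With this PR the process
    // aborts here instead of materializing an invalid NonZeroU32.
    // let zero = unsafe { NonZeroU32::new_unchecked(0) };
}
```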
```diff
@@ -332,7 +332,10 @@
 use crate::cmp::Ordering;
 use crate::fmt;
 use crate::hash;
-use crate::intrinsics::{self, abort, is_aligned_and_not_null};
+use crate::intrinsics::{
+    self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
+};
 use crate::mem::{self, MaybeUninit};
 
 #[stable(feature = "rust1", since = "1.0.0")]
```
```diff
@@ -749,6 +752,16 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
         };
     }
 
+    // SAFETY: the caller must guarantee that `x` and `y` are
+    // valid for writes and properly aligned.
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(x)
+                && is_aligned_and_not_null(y)
+                && is_nonoverlapping(x, y, count)
+        );
+    }
+
     // NOTE(scottmcm) MIRI is disabled here as reading in smaller units is a
     // pessimization for it. Also, if the type contains any unaligned pointers,
     // copying those over multiple reads is difficult to support.
```
```diff
@@ -839,6 +852,7 @@ pub const unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
     // and cannot overlap `src` since `dst` must point to a distinct
     // allocated object.
     unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
         mem::swap(&mut *dst, &mut src); // cannot overlap
     }
     src
```
```diff
@@ -1318,12 +1332,11 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
 #[inline]
 #[stable(feature = "volatile", since = "1.9.0")]
 pub unsafe fn read_volatile<T>(src: *const T) -> T {
-    if cfg!(debug_assertions) && !is_aligned_and_not_null(src) {
-        // Not panicking to keep codegen impact smaller.
-        abort();
-    }
     // SAFETY: the caller must uphold the safety contract for `volatile_load`.
-    unsafe { intrinsics::volatile_load(src) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(src));
+        intrinsics::volatile_load(src)
+    }
 }
 
 /// Performs a volatile write of a memory location with the given value without
```
```diff
@@ -1389,12 +1402,9 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T {
 #[inline]
 #[stable(feature = "volatile", since = "1.9.0")]
 pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
-    if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) {
-        // Not panicking to keep codegen impact smaller.
-        abort();
-    }
     // SAFETY: the caller must uphold the safety contract for `volatile_store`.
     unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
         intrinsics::volatile_store(dst, src);
     }
 }
```
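The volatile accessors previously open-coded the `cfg!(debug_assertions)` test; they now go through the shared macro. A correct caller sees no difference:

```rust
use core::ptr;

fn main() {
    let mut x: u32 = 0;
    // `&mut x` is aligned and non-null, so the asserted precondition holds.
    unsafe {
        ptr::write_volatile(&mut x, 42);
        assert_eq!(ptr::read_volatile(&x), 42);
    }
}
```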
```diff
@@ -1,5 +1,6 @@
 //! Indexing implementations for `[T]`.
 
+use crate::intrinsics::assert_unsafe_precondition;
 use crate::intrinsics::const_eval_select;
 use crate::ops;
 use crate::ptr;
```
```diff
@@ -219,13 +220,19 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
         // cannot be longer than `isize::MAX`. They also guarantee that
         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
         // so the call to `add` is safe.
-        unsafe { slice.as_ptr().add(self) }
+        unsafe {
+            assert_unsafe_precondition!(self < slice.len());
+            slice.as_ptr().add(self)
+        }
     }
 
     #[inline]
     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
         // SAFETY: see comments for `get_unchecked` above.
-        unsafe { slice.as_mut_ptr().add(self) }
+        unsafe {
+            assert_unsafe_precondition!(self < slice.len());
+            slice.as_mut_ptr().add(self)
+        }
     }
 
     #[inline]
```
```diff
@@ -272,13 +279,18 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
         // cannot be longer than `isize::MAX`. They also guarantee that
         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
         // so the call to `add` is safe.
-        unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) }
+        unsafe {
+            assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
+            ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
+        }
     }
 
     #[inline]
     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
         // SAFETY: see comments for `get_unchecked` above.
         unsafe {
+            assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
             ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
         }
     }
```
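Taken together, `self.end >= self.start && self.end <= slice.len()` rejects both inverted and out-of-bounds ranges:

```rust
fn main() {
    let s = [1u32, 2, 3, 4, 5];
    // OK: 1 <= 4 and 4 <= s.len(), so the asserted precondition holds.
    let mid = unsafe { s.get_unchecked(1..4) };
    assert_eq!(mid, &[2, 3, 4]);

    // Each of these is UB and now aborts in a debug-assertions build of core:
    // unsafe { s.get_unchecked(2..8) }; // end > len
    // unsafe { s.get_unchecked(4..1) }; // end < start
}
```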
```diff
@@ -7,6 +7,7 @@
 #![stable(feature = "rust1", since = "1.0.0")]
 
 use crate::cmp::Ordering::{self, Greater, Less};
+use crate::intrinsics::{assert_unsafe_precondition, exact_div};
 use crate::marker::Copy;
 use crate::mem;
 use crate::num::NonZeroUsize;
```
|
||||||
#[unstable(feature = "slice_swap_unchecked", issue = "88539")]
|
#[unstable(feature = "slice_swap_unchecked", issue = "88539")]
|
||||||
#[rustc_const_unstable(feature = "const_swap", issue = "83163")]
|
#[rustc_const_unstable(feature = "const_swap", issue = "83163")]
|
||||||
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
|
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
|
||||||
#[cfg(debug_assertions)]
|
|
||||||
{
|
|
||||||
let _ = &self[a];
|
|
||||||
let _ = &self[b];
|
|
||||||
}
|
|
||||||
|
|
||||||
let ptr = self.as_mut_ptr();
|
let ptr = self.as_mut_ptr();
|
||||||
// SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
|
// SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
|
||||||
unsafe {
|
unsafe {
|
||||||
|
assert_unsafe_precondition!(a < self.len() && b < self.len());
|
||||||
ptr::swap(ptr.add(a), ptr.add(b));
|
ptr::swap(ptr.add(a), ptr.add(b));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
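Note the small behavioral change here: the old `&self[a]` probes panicked on a bad index in debug builds, while the new assertion aborts, consistent with the rest of this PR. Correct callers are unaffected; `swap_unchecked` is nightly-only (feature `slice_swap_unchecked`, per the attribute above):

```rust
#![feature(slice_swap_unchecked)]

fn main() {
    let mut v = [1, 2, 3, 4];
    // Both indices are in bounds, so the asserted precondition holds.
    unsafe { v.swap_unchecked(0, 3) };
    assert_eq!(v, [4, 2, 3, 1]);
}
```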
```diff
@@ -970,11 +966,11 @@ impl<T> [T] {
     #[inline]
     #[must_use]
     pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { crate::intrinsics::exact_div(self.len(), N) };
+        // SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
+        let new_len = unsafe {
+            assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+            exact_div(self.len(), N)
+        };
         // SAFETY: We cast a slice of `new_len * N` elements into
         // a slice of `new_len` many `N` elements chunks.
         unsafe { from_raw_parts(self.as_ptr().cast(), new_len) }
```
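`as_chunks_unchecked` is likewise nightly-only (feature `slice_as_chunks`). A usage sketch of the precondition being asserted:

```rust
#![feature(slice_as_chunks)]

fn main() {
    let v = [1u8, 2, 3, 4, 5, 6];
    // OK: N = 3 is nonzero and 6 % 3 == 0, exactly the asserted condition.
    let chunks: &[[u8; 3]] = unsafe { v.as_chunks_unchecked() };
    assert_eq!(chunks, &[[1, 2, 3], [4, 5, 6]]);
}
```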
```diff
@@ -1109,11 +1105,11 @@ impl<T> [T] {
     #[inline]
     #[must_use]
     pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { crate::intrinsics::exact_div(self.len(), N) };
+        // SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
+        let new_len = unsafe {
+            assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+            exact_div(self.len(), N)
+        };
         // SAFETY: We cast a slice of `new_len * N` elements into
         // a slice of `new_len` many `N` elements chunks.
         unsafe { from_raw_parts_mut(self.as_mut_ptr().cast(), new_len) }
```
```diff
@@ -1675,7 +1671,10 @@ impl<T> [T] {
         //
         // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
         // is fine.
-        unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) }
+        unsafe {
+            assert_unsafe_precondition!(mid <= len);
+            (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
+        }
     }
 
     /// Divides one slice into an array and a remainder slice at an index.
```
```diff
@@ -1,6 +1,7 @@
 //! Free functions to create `&[T]` and `&mut [T]`.
 
 use crate::array;
+use crate::intrinsics::{assert_unsafe_precondition, is_aligned_and_not_null};
 use crate::ops::Range;
 use crate::ptr;
 
```
```diff
@@ -87,10 +88,14 @@ use crate::ptr;
 #[rustc_const_unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
 #[must_use]
 pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
-    debug_check_data_len(data, len);
-
     // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
-    unsafe { &*ptr::slice_from_raw_parts(data, len) }
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(data)
+                && crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize
+        );
+        &*ptr::slice_from_raw_parts(data, len)
+    }
 }
 
 /// Performs the same functionality as [`from_raw_parts`], except that a
```
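Both `from_raw_parts` conditions are visible above: the pointer must be aligned and non-null, and the total byte length must not overflow `isize`. For example:

```rust
use core::slice;

fn main() {
    let v = [10u32, 20, 30];
    // OK: aligned, non-null, and 3 * 4 bytes is well under isize::MAX.
    let s = unsafe { slice::from_raw_parts(v.as_ptr(), v.len()) };
    assert_eq!(s, &[10, 20, 30]);

    // UB, and now an abort under debug assertions: null data pointer.
    // unsafe { slice::from_raw_parts(core::ptr::null::<u32>(), 1) };
}
```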
```diff
@@ -127,46 +132,16 @@ pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T]
 #[rustc_const_unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
 #[must_use]
 pub const unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
-    debug_check_data_len(data as _, len);
-
     // SAFETY: the caller must uphold the safety contract for `from_raw_parts_mut`.
-    unsafe { &mut *ptr::slice_from_raw_parts_mut(data, len) }
-}
-
-// In debug builds checks that `data` pointer is aligned and non-null and that slice with given `len` would cover less than half the address space
-#[cfg(debug_assertions)]
-#[unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
-#[rustc_const_unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
-const fn debug_check_data_len<T>(data: *const T, len: usize) {
-    fn rt_check<T>(data: *const T) {
-        use crate::intrinsics::is_aligned_and_not_null;
-
-        assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
-    }
-
-    const fn noop<T>(_: *const T) {}
-
-    // SAFETY:
-    //
-    // `rt_check` is just a debug assert to hint users that they are causing UB,
-    // it is not required for safety (the safety must be guatanteed by
-    // the `from_raw_parts[_mut]` caller).
-    //
-    // As per our safety precondition, we may assume that assertion above never fails.
-    // Therefore, noop and rt_check are observably equivalent.
     unsafe {
-        crate::intrinsics::const_eval_select((data,), noop, rt_check);
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(data)
+                && crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize
+        );
+        &mut *ptr::slice_from_raw_parts_mut(data, len)
     }
-
-    assert!(
-        crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
-        "attempt to create slice covering at least half the address space"
-    );
 }
 
-#[cfg(not(debug_assertions))]
-const fn debug_check_data_len<T>(_data: *const T, _len: usize) {}
-
 /// Converts a reference to T into a slice of length 1 (without copying).
 #[stable(feature = "from_ref", since = "1.28.0")]
 #[rustc_const_unstable(feature = "const_slice_from_ref", issue = "90206")]
```