Make the stdlib largely conform to strict provenance.
Some things, like the unwinders and system APIs, are not fully conformant; this only covers a lot of low-hanging fruit.
This commit is contained in:
parent 5167b6891c
commit c7de289e1c

30 changed files with 100 additions and 81 deletions
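Editor's note: nearly every hunk below applies one of three mechanical rewrites: pointer-to-integer casts become `addr()`, integer-to-pointer casts for never-dereferenced values become `ptr::invalid_mut`, and address adjustments keep their provenance via `with_addr`/`map_addr`. The sketch below is not part of the commit; it is a rough stable-Rust illustration (the local `invalid_mut` is a hand-written stand-in for the then-nightly `core::ptr::invalid_mut`, later stabilized under the name `without_provenance_mut`; `addr`/`with_addr` have since been stabilized as well):

    // Stand-in for the nightly `core::ptr::invalid_mut` used throughout this
    // diff. A plain `as` cast is the closest stable analogue, though it does
    // not carry the same "no provenance" claim the real API makes.
    fn invalid_mut<T>(addr: usize) -> *mut T {
        addr as *mut T
    }

    fn main() {
        let x = 42u32;
        let p: *const u32 = &x;

        // Old style: round-trip the pointer through usize.
        let a_old = p as usize;
        // New style: `addr()` extracts the address without implying that the
        // integer can later be turned back into a valid pointer.
        let a_new = p.addr();
        assert_eq!(a_old, a_new);

        // Sentinel pointers that are never dereferenced are built from a bare
        // address, making the lack of provenance explicit.
        let sentinel: *mut u32 = invalid_mut(usize::MAX);
        assert_eq!(sentinel.addr(), usize::MAX);

        // To change an address while keeping provenance, use `with_addr`
        // instead of casting an integer back to a pointer.
        let q = p.with_addr(p.addr());
        assert_eq!(q, p);
    }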
@@ -158,6 +158,7 @@
 #![feature(rustc_allow_const_fn_unstable)]
 #![feature(rustc_attrs)]
 #![feature(staged_api)]
+#![feature(strict_provenance)]
 #![cfg_attr(test, feature(test))]
 #![feature(unboxed_closures)]
 #![feature(unsized_fn_params)]
@@ -2115,13 +2115,12 @@ impl<T> Weak<T> {
     #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")]
     #[must_use]
     pub const fn new() -> Weak<T> {
-        Weak { ptr: unsafe { NonNull::new_unchecked(usize::MAX as *mut RcBox<T>) } }
+        Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) } }
     }
 }

 pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
-    let address = ptr as *mut () as usize;
-    address == usize::MAX
+    (ptr as *mut ()).addr() == usize::MAX
 }

 /// Helper type to allow accessing the reference counts without
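Editor's note: a minimal model of the `Weak::new` / `is_dangling` pairing above, not from the commit. The sentinel is a bare address, never a real allocation, so it is created and inspected purely through address operations; `Inner` stands in for `RcBox<T>`, and the `as` cast stands in for `ptr::invalid_mut`:

    use core::ptr::NonNull;

    struct Inner {
        #[allow(dead_code)]
        value: u32,
    }

    fn dangling_weak() -> NonNull<Inner> {
        // usize::MAX is never a valid allocation address for a type with
        // alignment > 1, so it can serve as the "no allocation" tag.
        unsafe { NonNull::new_unchecked(usize::MAX as *mut Inner) }
    }

    fn is_dangling(ptr: *mut Inner) -> bool {
        // Only the address is compared; the pointer is never dereferenced.
        (ptr as *mut ()).addr() == usize::MAX
    }

    fn main() {
        let w = dangling_weak();
        assert!(is_dangling(w.as_ptr()));
    }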
@@ -1044,7 +1044,7 @@ where
 impl<T> Drop for MergeHole<T> {
     fn drop(&mut self) {
         // `T` is not a zero-sized type, so it's okay to divide by its size.
-        let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
+        let len = (self.end.addr() - self.start.addr()) / mem::size_of::<T>();
         unsafe {
             ptr::copy_nonoverlapping(self.start, self.dest, len);
         }
@@ -1746,7 +1746,7 @@ impl<T> Weak<T> {
     #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")]
     #[must_use]
     pub const fn new() -> Weak<T> {
-        Weak { ptr: unsafe { NonNull::new_unchecked(usize::MAX as *mut ArcInner<T>) } }
+        Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) } }
     }
 }

@@ -159,7 +159,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         let exact = if mem::size_of::<T>() == 0 {
-            (self.end as usize).wrapping_sub(self.ptr as usize)
+            self.end.addr().wrapping_sub(self.ptr.addr())
         } else {
             unsafe { self.end.offset_from(self.ptr) as usize }
         };
@@ -194,7 +194,7 @@ impl Layout {
     #[inline]
     pub const fn dangling(&self) -> NonNull<u8> {
         // SAFETY: align is guaranteed to be non-zero
-        unsafe { NonNull::new_unchecked(self.align() as *mut u8) }
+        unsafe { NonNull::new_unchecked(crate::ptr::invalid_mut::<u8>(self.align())) }
     }

     /// Creates a layout describing the record that can hold a value
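Editor's note: the trick in `Layout::dangling` (and in `NonNull::dangling` / `Unique::dangling` further down) is that an allocation's alignment is itself a non-zero, correctly aligned address, so it makes a valid-to-hold but never-dereferenceable placeholder. A sketch, not from the commit, with the `as` cast standing in for `ptr::invalid_mut`:

    use core::alloc::Layout;
    use core::ptr::NonNull;

    fn dangling_for(layout: Layout) -> NonNull<u8> {
        // `align()` is non-zero by `Layout`'s invariant, and trivially a
        // multiple of itself, so the resulting pointer is aligned and non-null.
        unsafe { NonNull::new_unchecked(layout.align() as *mut u8) }
    }

    fn main() {
        let layout = Layout::new::<u64>();
        let p = dangling_for(layout);
        assert!(!p.as_ptr().is_null());
        assert_eq!(p.as_ptr().addr() % layout.align(), 0);
    }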
@@ -352,7 +352,11 @@ impl<'a> ArgumentV1<'a> {
     }

     fn as_usize(&self) -> Option<usize> {
-        if self.formatter as usize == USIZE_MARKER as usize {
+        // We are type punning a bit here: USIZE_MARKER only takes an &usize but
+        // formatter takes an &Opaque. Rust understandably doesn't think we should compare
+        // the function pointers if they don't have the same signature, so we cast to
+        // pointers to convince it that we know what we're doing.
+        if self.formatter as *mut u8 == USIZE_MARKER as *mut u8 {
             // SAFETY: The `formatter` field is only set to USIZE_MARKER if
             // the value is a usize, so this is safe
             Some(unsafe { *(self.value as *const _ as *const usize) })
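Editor's note: the hunk above compares two function pointers of different signatures. Rust refuses to compare `fn(&Opaque)` against `fn(&usize)` directly, but both can be cast to a raw data pointer and compared by code address, which is exactly what the new comment describes. A toy illustration, not from the commit (`take_usize`, `handler`, and `marker` are made-up names):

    fn take_usize(_: &usize) {}

    fn main() {
        let handler: fn(&usize) = take_usize;
        let marker: fn(&usize) = take_usize;

        // Casting through `*mut u8` sidesteps the signature mismatch while
        // still comparing addresses, mirroring
        // `self.formatter as *mut u8 == USIZE_MARKER as *mut u8`.
        if handler as *mut u8 == marker as *mut u8 {
            println!("same function");
        }
    }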
@@ -2246,7 +2250,7 @@ impl<T: ?Sized> Pointer for *const T {
         }
         f.flags |= 1 << (FlagV1::Alternate as u32);

-        let ret = LowerHex::fmt(&(ptr as usize), f);
+        let ret = LowerHex::fmt(&(ptr.addr()), f);

         f.width = old_width;
         f.flags = old_flags;
@@ -793,7 +793,7 @@ mod impls {
         #[inline]
         fn hash<H: Hasher>(&self, state: &mut H) {
             let (address, metadata) = self.to_raw_parts();
-            state.write_usize(address as usize);
+            state.write_usize(address.addr());
             metadata.hash(state);
         }
     }
@@ -803,7 +803,7 @@ mod impls {
         #[inline]
         fn hash<H: Hasher>(&self, state: &mut H) {
             let (address, metadata) = self.to_raw_parts();
-            state.write_usize(address as usize);
+            state.write_usize(address.addr());
             metadata.hash(state);
         }
     }
@@ -1972,15 +1972,15 @@ extern "rust-intrinsic" {
 /// Checks whether `ptr` is properly aligned with respect to
 /// `align_of::<T>()`.
 pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
-    !ptr.is_null() && ptr as usize % mem::align_of::<T>() == 0
+    !ptr.is_null() && ptr.addr() % mem::align_of::<T>() == 0
 }

 /// Checks whether the regions of memory starting at `src` and `dst` of size
 /// `count * size_of::<T>()` do *not* overlap.
 #[cfg(debug_assertions)]
 pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
-    let src_usize = src as usize;
-    let dst_usize = dst as usize;
+    let src_usize = src.addr();
+    let dst_usize = dst.addr();
     let size = mem::size_of::<T>().checked_mul(count).unwrap();
     let diff = if src_usize > dst_usize { src_usize - dst_usize } else { dst_usize - src_usize };
     // If the absolute distance between the ptrs is at least as big as the size of the buffer,
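Editor's note: these two debug helpers only need the numeric address, so `addr()` suffices and no pointer is ever re-created from the integers. A stand-alone sketch of the same checks, not from the commit (it simplifies the distance computation with `abs_diff`):

    use core::mem;

    fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
        !ptr.is_null() && ptr.addr() % mem::align_of::<T>() == 0
    }

    fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
        let size = mem::size_of::<T>().checked_mul(count).unwrap();
        // Regions don't overlap iff the distance between their start
        // addresses is at least the size of one region.
        src.addr().abs_diff(dst.addr()) >= size
    }

    fn main() {
        let buf = [0u32; 8];
        assert!(is_aligned_and_not_null(buf.as_ptr()));
        assert!(is_nonoverlapping(buf.as_ptr(), buf[4..].as_ptr(), 4));
    }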
@@ -90,7 +90,7 @@ impl<T: Sized> NonNull<T> {
         // to a *mut T. Therefore, `ptr` is not null and the conditions for
         // calling new_unchecked() are respected.
         unsafe {
-            let ptr = mem::align_of::<T>() as *mut T;
+            let ptr = crate::ptr::invalid_mut::<T>(mem::align_of::<T>());
             NonNull::new_unchecked(ptr)
         }
     }
@@ -469,7 +469,7 @@ impl<T> NonNull<[T]> {
     /// use std::ptr::NonNull;
     ///
     /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
-    /// assert_eq!(slice.as_non_null_ptr(), NonNull::new(1 as *mut i8).unwrap());
+    /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
     /// ```
     #[inline]
     #[must_use]
@@ -489,7 +489,7 @@ impl<T> NonNull<[T]> {
     /// use std::ptr::NonNull;
     ///
     /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
-    /// assert_eq!(slice.as_mut_ptr(), 1 as *mut i8);
+    /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
     /// ```
     #[inline]
     #[must_use]
@@ -73,7 +73,7 @@ impl<T: Sized> Unique<T> {
     pub const fn dangling() -> Self {
         // SAFETY: mem::align_of() returns a valid, non-null pointer. The
         // conditions to call new_unchecked() are thus respected.
-        unsafe { Unique::new_unchecked(mem::align_of::<T>() as *mut T) }
+        unsafe { Unique::new_unchecked(crate::ptr::invalid_mut::<T>(mem::align_of::<T>())) }
     }
 }

@@ -294,7 +294,7 @@ fn is_ascii(s: &[u8]) -> bool {
     // Paranoia check about alignment, since we're about to do a bunch of
     // unaligned loads. In practice this should be impossible barring a bug in
     // `align_offset` though.
-    debug_assert_eq!((word_ptr as usize) % mem::align_of::<usize>(), 0);
+    debug_assert_eq!((word_ptr.addr()) % mem::align_of::<usize>(), 0);

     // Read subsequent words until the last aligned word, excluding the last
     // aligned word by itself to be done in tail check later, to ensure that
@@ -302,9 +302,9 @@ fn is_ascii(s: &[u8]) -> bool {
     while byte_pos < len - USIZE_SIZE {
         debug_assert!(
             // Sanity check that the read is in bounds
-            (word_ptr as usize + USIZE_SIZE) <= (start.wrapping_add(len) as usize) &&
+            (word_ptr.addr() + USIZE_SIZE) <= (start.wrapping_add(len).addr()) &&
             // And that our assumptions about `byte_pos` hold.
-            (word_ptr as usize) - (start as usize) == byte_pos
+            (word_ptr.addr()) - (start.addr()) == byte_pos
         );

         // SAFETY: We know `word_ptr` is properly aligned (because of
@@ -20,13 +20,13 @@ macro_rules! len {
         if size == 0 {
             // This _cannot_ use `unchecked_sub` because we depend on wrapping
             // to represent the length of long ZST slice iterators.
-            ($self.end as usize).wrapping_sub(start.as_ptr() as usize)
+            ($self.end.addr()).wrapping_sub(start.as_ptr().addr())
         } else {
             // We know that `start <= end`, so can do better than `offset_from`,
             // which needs to deal in signed. By setting appropriate flags here
             // we can tell LLVM this, which helps it remove bounds checks.
             // SAFETY: By the type invariant, `start <= end`
-            let diff = unsafe { unchecked_sub($self.end as usize, start.as_ptr() as usize) };
+            let diff = unsafe { unchecked_sub($self.end.addr(), start.as_ptr().addr()) };
             // By also telling LLVM that the pointers are apart by an exact
             // multiple of the type size, it can optimize `len() == 0` down to
             // `start == end` instead of `(end - start) < size`.
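Editor's note: for zero-sized types the slice iterator encodes its length directly in the `end` pointer's address (one address unit per element), so length recovery is pure address arithmetic, and `wrapping_sub` matters because such an iterator may legally wrap the address space. A compact sketch, not from the commit:

    fn zst_len<T>(start: *const T, end: *const T) -> usize {
        assert_eq!(core::mem::size_of::<T>(), 0);
        // Mirrors `$self.end.addr().wrapping_sub(start.as_ptr().addr())`.
        end.addr().wrapping_sub(start.addr())
    }

    fn main() {
        let units = [(), (), (), ()];
        let start = units.as_ptr();
        // The stdlib builds `end` for ZSTs as start's address plus the
        // element count, keeping start's provenance via `with_addr`.
        let end = start.with_addr(start.addr().wrapping_add(units.len()));
        assert_eq!(zst_len(start, end), 4);
    }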
@@ -269,7 +269,7 @@ where
     // Returns the number of elements between pointers `l` (inclusive) and `r` (exclusive).
     fn width<T>(l: *mut T, r: *mut T) -> usize {
         assert!(mem::size_of::<T>() > 0);
-        (r as usize - l as usize) / mem::size_of::<T>()
+        (r.addr() - l.addr()) / mem::size_of::<T>()
     }

     loop {
@@ -293,7 +293,7 @@ impl Backtrace {
         if !Backtrace::enabled() {
             return Backtrace { inner: Inner::Disabled };
         }
-        Backtrace::create(Backtrace::capture as usize)
+        Backtrace::create((Backtrace::capture as *mut ()).addr())
     }

     /// Forcibly captures a full backtrace, regardless of environment variable
@@ -308,7 +308,7 @@ impl Backtrace {
     /// parts of code.
     #[inline(never)] // want to make sure there's a frame here to remove
     pub fn force_capture() -> Backtrace {
-        Backtrace::create(Backtrace::force_capture as usize)
+        Backtrace::create((Backtrace::force_capture as *mut ()).addr())
     }

     /// Forcibly captures a disabled backtrace, regardless of environment
@@ -330,7 +330,7 @@ impl Backtrace {
                 frame: RawFrame::Actual(frame.clone()),
                 symbols: Vec::new(),
             });
-            if frame.symbol_address() as usize == ip && actual_start.is_none() {
+            if frame.symbol_address().addr() == ip && actual_start.is_none() {
                 actual_start = Some(frames.len());
             }
             true
@@ -493,7 +493,7 @@ impl RawFrame {
         match self {
             RawFrame::Actual(frame) => frame.ip(),
             #[cfg(test)]
-            RawFrame::Fake => 1 as *mut c_void,
+            RawFrame::Fake => ptr::invalid_mut(1),
         }
     }
 }
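Editor's note: `Backtrace::capture as usize` became a two-step cast above: the function pointer is first turned into a data pointer (`*mut ()`), and only then is its address taken. The resulting integer is the same; the intermediate step just keeps every pointer-to-integer conversion on the single `addr()` path the commit standardizes on. A sketch, not from the commit (`anchor` is a made-up name):

    fn anchor() {}

    fn main() {
        let old_style = anchor as usize;
        let new_style = (anchor as *mut ()).addr();
        assert_eq!(old_style, new_style);
    }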
@@ -106,7 +106,7 @@ use super::{Custom, ErrorData, ErrorKind, SimpleMessage};
 use alloc::boxed::Box;
 use core::marker::PhantomData;
 use core::mem::{align_of, size_of};
-use core::ptr::NonNull;
+use core::ptr::{self, NonNull};

 // The 2 least-significant bits are used as tag.
 const TAG_MASK: usize = 0b11;
@@ -136,7 +136,7 @@ impl Repr {
         let p = Box::into_raw(b).cast::<u8>();
         // Should only be possible if an allocator handed out a pointer with
         // wrong alignment.
-        debug_assert_eq!((p as usize & TAG_MASK), 0);
+        debug_assert_eq!((p.addr() & TAG_MASK), 0);
         // Note: We know `TAG_CUSTOM <= size_of::<Custom>()` (static_assert at
         // end of file), and both the start and end of the expression must be
         // valid without address space wraparound due to `Box`'s semantics.
@@ -166,7 +166,7 @@ impl Repr {
     pub(super) fn new_os(code: i32) -> Self {
         let utagged = ((code as usize) << 32) | TAG_OS;
         // Safety: `TAG_OS` is not zero, so the result of the `|` is not 0.
-        let res = Self(unsafe { NonNull::new_unchecked(utagged as *mut ()) }, PhantomData);
+        let res = Self(unsafe { NonNull::new_unchecked(ptr::invalid_mut(utagged)) }, PhantomData);
         // quickly smoke-check we encoded the right thing (This generally will
         // only run in libstd's tests, unless the user uses -Zbuild-std)
         debug_assert!(
@@ -180,7 +180,7 @@ impl Repr {
     pub(super) fn new_simple(kind: ErrorKind) -> Self {
         let utagged = ((kind as usize) << 32) | TAG_SIMPLE;
         // Safety: `TAG_SIMPLE` is not zero, so the result of the `|` is not 0.
-        let res = Self(unsafe { NonNull::new_unchecked(utagged as *mut ()) }, PhantomData);
+        let res = Self(unsafe { NonNull::new_unchecked(ptr::invalid_mut(utagged)) }, PhantomData);
         // quickly smoke-check we encoded the right thing (This generally will
         // only run in libstd's tests, unless the user uses -Zbuild-std)
         debug_assert!(
@@ -238,7 +238,7 @@ unsafe fn decode_repr<C, F>(ptr: NonNull<()>, make_custom: F) -> ErrorData<C>
 where
     F: FnOnce(*mut Custom) -> C,
 {
-    let bits = ptr.as_ptr() as usize;
+    let bits = ptr.as_ptr().addr();
     match bits & TAG_MASK {
         TAG_OS => {
             let code = ((bits as i64) >> 32) as i32;
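Editor's note: the bit-packed `io::Error` repr above stores a tag in the low bits of a pointer-sized word (with an OS error code shifted left 32 in the `TAG_OS` case), and the word only ever carries real provenance in the boxed-custom case. A reduced sketch of the integer-tag arm, not from the commit; the real layout lives in repr_bitpacked.rs, and this assumes a 64-bit target as that file does (the `as` cast stands in for `ptr::invalid_mut`):

    const TAG_MASK: usize = 0b11;
    const TAG_OS: usize = 0b01;

    fn encode_os(code: i32) -> *mut () {
        let utagged = ((code as usize) << 32) | TAG_OS;
        // No provenance needed: this "pointer" is only decoded, never
        // dereferenced.
        utagged as *mut ()
    }

    fn decode_os(ptr: *mut ()) -> Option<i32> {
        let bits = ptr.addr();
        match bits & TAG_MASK {
            // Arithmetic shift on i64 recovers negative codes too.
            TAG_OS => Some(((bits as i64) >> 32) as i32),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(decode_os(encode_os(-2)), Some(-2));
        assert_eq!(decode_os(encode_os(17)), Some(17));
    }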
@@ -275,6 +275,7 @@
 #![feature(extend_one)]
 #![feature(float_minimum_maximum)]
 #![feature(format_args_nl)]
+#![feature(strict_provenance)]
 #![feature(get_mut_unchecked)]
 #![feature(hashmap_internals)]
 #![feature(int_error_internals)]
@@ -9,6 +9,7 @@ use crate::fs;
 use crate::io;
 use crate::marker::PhantomData;
 use crate::mem::forget;
+use crate::ptr;
 use crate::sys::c;
 use crate::sys::cvt;
 use crate::sys_common::{AsInner, FromInner, IntoInner};
@@ -182,7 +183,7 @@ impl OwnedHandle {
             return unsafe { Ok(Self::from_raw_handle(handle)) };
         }

-        let mut ret = 0 as c::HANDLE;
+        let mut ret = ptr::null_mut();
         cvt(unsafe {
             let cur_proc = c::GetCurrentProcess();
             c::DuplicateHandle(
@@ -129,6 +129,7 @@ impl OwnedSocket {
         }
     }

+    // FIXME(strict_provenance_magic): we defined RawSocket to be a u64 ;-;
     #[cfg(not(target_vendor = "uwp"))]
     pub(crate) fn set_no_inherit(&self) -> io::Result<()> {
         cvt(unsafe {
@@ -1449,8 +1449,8 @@ impl PathBuf {
         };

         // truncate until right after the file stem
-        let end_file_stem = file_stem[file_stem.len()..].as_ptr() as usize;
-        let start = os_str_as_u8_slice(&self.inner).as_ptr() as usize;
+        let end_file_stem = file_stem[file_stem.len()..].as_ptr().addr();
+        let start = os_str_as_u8_slice(&self.inner).as_ptr().addr();
         let v = self.as_mut_vec();
         v.truncate(end_file_stem.wrapping_sub(start));

@@ -91,9 +91,12 @@ use crate::cell::Cell;
 use crate::fmt;
 use crate::marker;
 use crate::panic::{RefUnwindSafe, UnwindSafe};
-use crate::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
+use crate::ptr;
+use crate::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
 use crate::thread::{self, Thread};

+type Masked = ();
+
 /// A synchronization primitive which can be used to run a one-time global
 /// initialization. Useful for one-time initialization for FFI or related
 /// functionality. This type can only be constructed with [`Once::new()`].
@@ -113,7 +116,7 @@ use crate::thread::{self, Thread};
 pub struct Once {
     // `state_and_queue` is actually a pointer to a `Waiter` with extra state
     // bits, so we add the `PhantomData` appropriately.
-    state_and_queue: AtomicUsize,
+    state_and_queue: AtomicPtr<Masked>,
     _marker: marker::PhantomData<*const Waiter>,
 }

@@ -136,7 +139,7 @@ impl RefUnwindSafe for Once {}
 #[derive(Debug)]
 pub struct OnceState {
     poisoned: bool,
-    set_state_on_drop_to: Cell<usize>,
+    set_state_on_drop_to: Cell<*mut Masked>,
 }

 /// Initialization value for static [`Once`] values.
@@ -184,8 +187,8 @@ struct Waiter {
 // Every node is a struct on the stack of a waiting thread.
 // Will wake up the waiters when it gets dropped, i.e. also on panic.
 struct WaiterQueue<'a> {
-    state_and_queue: &'a AtomicUsize,
-    set_state_on_drop_to: usize,
+    state_and_queue: &'a AtomicPtr<Masked>,
+    set_state_on_drop_to: *mut Masked,
 }

 impl Once {
@@ -195,7 +198,10 @@ impl Once {
     #[rustc_const_stable(feature = "const_once_new", since = "1.32.0")]
     #[must_use]
     pub const fn new() -> Once {
-        Once { state_and_queue: AtomicUsize::new(INCOMPLETE), _marker: marker::PhantomData }
+        Once {
+            state_and_queue: AtomicPtr::new(ptr::invalid_mut(INCOMPLETE)),
+            _marker: marker::PhantomData,
+        }
     }

     /// Performs an initialization routine once and only once. The given closure
|
||||||
// operations visible to us, and, this being a fast path, weaker
|
// operations visible to us, and, this being a fast path, weaker
|
||||||
// ordering helps with performance. This `Acquire` synchronizes with
|
// ordering helps with performance. This `Acquire` synchronizes with
|
||||||
// `Release` operations on the slow path.
|
// `Release` operations on the slow path.
|
||||||
self.state_and_queue.load(Ordering::Acquire) == COMPLETE
|
self.state_and_queue.load(Ordering::Acquire).addr() == COMPLETE
|
||||||
}
|
}
|
||||||
|
|
||||||
// This is a non-generic function to reduce the monomorphization cost of
|
// This is a non-generic function to reduce the monomorphization cost of
|
||||||
|
@ -395,7 +401,7 @@ impl Once {
|
||||||
fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(&OnceState)) {
|
fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(&OnceState)) {
|
||||||
let mut state_and_queue = self.state_and_queue.load(Ordering::Acquire);
|
let mut state_and_queue = self.state_and_queue.load(Ordering::Acquire);
|
||||||
loop {
|
loop {
|
||||||
match state_and_queue {
|
match state_and_queue.addr() {
|
||||||
COMPLETE => break,
|
COMPLETE => break,
|
||||||
POISONED if !ignore_poisoning => {
|
POISONED if !ignore_poisoning => {
|
||||||
// Panic to propagate the poison.
|
// Panic to propagate the poison.
|
||||||
|
@ -405,7 +411,7 @@ impl Once {
|
||||||
// Try to register this thread as the one RUNNING.
|
// Try to register this thread as the one RUNNING.
|
||||||
let exchange_result = self.state_and_queue.compare_exchange(
|
let exchange_result = self.state_and_queue.compare_exchange(
|
||||||
state_and_queue,
|
state_and_queue,
|
||||||
RUNNING,
|
ptr::invalid_mut(RUNNING),
|
||||||
Ordering::Acquire,
|
Ordering::Acquire,
|
||||||
Ordering::Acquire,
|
Ordering::Acquire,
|
||||||
);
|
);
|
||||||
|
@ -417,13 +423,13 @@ impl Once {
|
||||||
// wake them up on drop.
|
// wake them up on drop.
|
||||||
let mut waiter_queue = WaiterQueue {
|
let mut waiter_queue = WaiterQueue {
|
||||||
state_and_queue: &self.state_and_queue,
|
state_and_queue: &self.state_and_queue,
|
||||||
set_state_on_drop_to: POISONED,
|
set_state_on_drop_to: ptr::invalid_mut(POISONED),
|
||||||
};
|
};
|
||||||
// Run the initialization function, letting it know if we're
|
// Run the initialization function, letting it know if we're
|
||||||
// poisoned or not.
|
// poisoned or not.
|
||||||
let init_state = OnceState {
|
let init_state = OnceState {
|
||||||
poisoned: state_and_queue == POISONED,
|
poisoned: state_and_queue.addr() == POISONED,
|
||||||
set_state_on_drop_to: Cell::new(COMPLETE),
|
set_state_on_drop_to: Cell::new(ptr::invalid_mut(COMPLETE)),
|
||||||
};
|
};
|
||||||
init(&init_state);
|
init(&init_state);
|
||||||
waiter_queue.set_state_on_drop_to = init_state.set_state_on_drop_to.get();
|
waiter_queue.set_state_on_drop_to = init_state.set_state_on_drop_to.get();
|
||||||
|
@ -432,7 +438,7 @@ impl Once {
|
||||||
_ => {
|
_ => {
|
||||||
// All other values must be RUNNING with possibly a
|
// All other values must be RUNNING with possibly a
|
||||||
// pointer to the waiter queue in the more significant bits.
|
// pointer to the waiter queue in the more significant bits.
|
||||||
assert!(state_and_queue & STATE_MASK == RUNNING);
|
assert!(state_and_queue.addr() & STATE_MASK == RUNNING);
|
||||||
wait(&self.state_and_queue, state_and_queue);
|
wait(&self.state_and_queue, state_and_queue);
|
||||||
state_and_queue = self.state_and_queue.load(Ordering::Acquire);
|
state_and_queue = self.state_and_queue.load(Ordering::Acquire);
|
||||||
}
|
}
|
||||||
|
@ -441,13 +447,13 @@ impl Once {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn wait(state_and_queue: &AtomicUsize, mut current_state: usize) {
|
fn wait(state_and_queue: &AtomicPtr<Masked>, mut current_state: *mut Masked) {
|
||||||
// Note: the following code was carefully written to avoid creating a
|
// Note: the following code was carefully written to avoid creating a
|
||||||
// mutable reference to `node` that gets aliased.
|
// mutable reference to `node` that gets aliased.
|
||||||
loop {
|
loop {
|
||||||
// Don't queue this thread if the status is no longer running,
|
// Don't queue this thread if the status is no longer running,
|
||||||
// otherwise we will not be woken up.
|
// otherwise we will not be woken up.
|
||||||
if current_state & STATE_MASK != RUNNING {
|
if current_state.addr() & STATE_MASK != RUNNING {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -455,15 +461,15 @@ fn wait(state_and_queue: &AtomicPtr<Masked>, mut current_state: *mut Masked) {
         let node = Waiter {
             thread: Cell::new(Some(thread::current())),
             signaled: AtomicBool::new(false),
-            next: (current_state & !STATE_MASK) as *const Waiter,
+            next: current_state.with_addr(current_state.addr() & !STATE_MASK) as *const Waiter,
         };
-        let me = &node as *const Waiter as usize;
+        let me = &node as *const Waiter as *const Masked as *mut Masked;

         // Try to slide in the node at the head of the linked list, making sure
         // that another thread didn't just replace the head of the linked list.
         let exchange_result = state_and_queue.compare_exchange(
             current_state,
-            me | RUNNING,
+            me.with_addr(me.addr() | RUNNING),
             Ordering::Release,
             Ordering::Relaxed,
         );
@@ -502,7 +508,7 @@ impl Drop for WaiterQueue<'_> {
             self.state_and_queue.swap(self.set_state_on_drop_to, Ordering::AcqRel);

         // We should only ever see an old state which was RUNNING.
-        assert_eq!(state_and_queue & STATE_MASK, RUNNING);
+        assert_eq!(state_and_queue.addr() & STATE_MASK, RUNNING);

         // Walk the entire linked list of waiters and wake them up (in lifo
         // order, last to register is first to wake up).
@@ -511,7 +517,8 @@ impl Drop for WaiterQueue<'_> {
             // free `node` if there happens to be has a spurious wakeup.
             // So we have to take out the `thread` field and copy the pointer to
             // `next` first.
-            let mut queue = (state_and_queue & !STATE_MASK) as *const Waiter;
+            let mut queue =
+                state_and_queue.with_addr(state_and_queue.addr() & !STATE_MASK) as *const Waiter;
             while !queue.is_null() {
                 let next = (*queue).next;
                 let thread = (*queue).thread.take().unwrap();
@@ -568,6 +575,6 @@ impl OnceState {
     /// Poison the associated [`Once`] without explicitly panicking.
     // NOTE: This is currently only exposed for the `lazy` module
     pub(crate) fn poison(&self) {
-        self.set_state_on_drop_to.set(POISONED);
+        self.set_state_on_drop_to.set(ptr::invalid_mut(POISONED));
     }
 }
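Editor's note: the `Once` rework swaps `AtomicUsize` for `AtomicPtr<Masked>`. The same word still holds either a small state constant or a `Waiter` pointer with state OR'd into its low bits, but updates now go through `with_addr`/`map_addr` so queue pointers keep their provenance. A minimal sketch of the packing, not from the commit (state values as in once.rs; `Node` is a made-up stand-in for `Waiter`):

    use std::sync::atomic::{AtomicPtr, Ordering};

    type Masked = ();
    const STATE_MASK: usize = 0b11;
    const RUNNING: usize = 0x2;

    struct Node {
        #[allow(dead_code)]
        payload: u32,
    }

    fn pack(node: *const Node, state: usize) -> *mut Masked {
        // Keep the node's provenance; only the low bits change.
        (node as *mut Masked).map_addr(|a| a | state)
    }

    fn unpack(word: *mut Masked) -> (*const Node, usize) {
        let state = word.addr() & STATE_MASK;
        (word.with_addr(word.addr() & !STATE_MASK) as *const Node, state)
    }

    fn main() {
        let node = Node { payload: 7 };
        // Node's alignment (4) guarantees the low two bits are free for state.
        assert_eq!((&node as *const Node).addr() & STATE_MASK, 0);

        let word = AtomicPtr::new(pack(&node, RUNNING));
        let (ptr, state) = unpack(word.load(Ordering::Acquire));
        assert_eq!(state, RUNNING);
        assert_eq!(ptr, &node as *const Node);
    }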
@@ -159,7 +159,7 @@ unsafe fn allocate(layout: Layout, zeroed: bool) -> *mut u8 {
         // Create a correctly aligned pointer offset from the start of the allocated block,
         // and write a header before it.

-        let offset = layout.align() - (ptr as usize & (layout.align() - 1));
+        let offset = layout.align() - (ptr.addr() & (layout.align() - 1));
         // SAFETY: `MIN_ALIGN` <= `offset` <= `layout.align()` and the size of the allocated
         // block is `layout.align() + layout.size()`. `aligned` will thus be a correctly aligned
         // pointer inside the allocated block with at least `layout.size()` bytes after it and at
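Editor's note: the Windows allocator over-allocates by `align` bytes and rounds the block pointer up to the next aligned address, leaving room for a header just below it; only the offset computation changed here (`as usize` to `addr()`). The rounding in isolation, as a sketch not from the commit:

    fn align_up_offset(ptr: *mut u8, align: usize) -> usize {
        debug_assert!(align.is_power_of_two());
        // Distance from `ptr` to the next multiple of `align` strictly above
        // it (always in 1..=align), so a header always fits below the result.
        align - (ptr.addr() & (align - 1))
    }

    fn main() {
        let mut block = [0u8; 64];
        let p = unsafe { block.as_mut_ptr().add(3) }; // deliberately offset
        let off = align_up_offset(p, 16);
        let aligned = p.wrapping_add(off);
        assert_eq!(aligned.addr() % 16, 0);
        assert!(off >= 1 && off <= 16);
    }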
@@ -173,7 +173,7 @@ pub const PROGRESS_CONTINUE: DWORD = 0;

 pub const E_NOTIMPL: HRESULT = 0x80004001u32 as HRESULT;

-pub const INVALID_HANDLE_VALUE: HANDLE = !0 as HANDLE;
+pub const INVALID_HANDLE_VALUE: HANDLE = ptr::invalid_mut(!0);

 pub const FACILITY_NT_BIT: DWORD = 0x1000_0000;

@@ -88,7 +88,7 @@ macro_rules! compat_fn {
             let symbol_name: *const u8 = concat!(stringify!($symbol), "\0").as_ptr();
             let module_handle = $crate::sys::c::GetModuleHandleA(module_name as *const i8);
             if !module_handle.is_null() {
-                match $crate::sys::c::GetProcAddress(module_handle, symbol_name as *const i8) as usize {
+                match $crate::sys::c::GetProcAddress(module_handle, symbol_name as *const i8).addr() {
                     0 => {}
                     n => {
                         PTR = Some(mem::transmute::<usize, F>(n));
@@ -57,6 +57,9 @@ pub struct DirEntry {
     data: c::WIN32_FIND_DATAW,
 }

+unsafe impl Send for OpenOptions {}
+unsafe impl Sync for OpenOptions {}
+
 #[derive(Clone, Debug)]
 pub struct OpenOptions {
     // generic
@@ -72,7 +75,7 @@ pub struct OpenOptions {
     attributes: c::DWORD,
     share_mode: c::DWORD,
     security_qos_flags: c::DWORD,
-    security_attributes: usize, // FIXME: should be a reference
+    security_attributes: c::LPSECURITY_ATTRIBUTES,
 }

 #[derive(Clone, PartialEq, Eq, Debug)]
@@ -187,7 +190,7 @@ impl OpenOptions {
             share_mode: c::FILE_SHARE_READ | c::FILE_SHARE_WRITE | c::FILE_SHARE_DELETE,
             attributes: 0,
             security_qos_flags: 0,
-            security_attributes: 0,
+            security_attributes: ptr::null_mut(),
         }
     }

@@ -228,7 +231,7 @@ impl OpenOptions {
         self.security_qos_flags = flags | c::SECURITY_SQOS_PRESENT;
     }
     pub fn security_attributes(&mut self, attrs: c::LPSECURITY_ATTRIBUTES) {
-        self.security_attributes = attrs as usize;
+        self.security_attributes = attrs;
     }

     fn get_access_mode(&self) -> io::Result<c::DWORD> {
@@ -289,7 +292,7 @@ impl File {
                 path.as_ptr(),
                 opts.get_access_mode()?,
                 opts.share_mode,
-                opts.security_attributes as *mut _,
+                opts.security_attributes,
                 opts.get_creation_mode()?,
                 opts.get_flags_and_attributes(),
                 ptr::null_mut(),
@@ -136,7 +136,7 @@ pub fn unrolled_find_u16s(needle: u16, haystack: &[u16]) -> Option<usize> {
         ($($n:literal,)+) => {
             $(
                 if start[$n] == needle {
-                    return Some((&start[$n] as *const u16 as usize - ptr as usize) / 2);
+                    return Some(((&start[$n] as *const u16).addr() - ptr.addr()) / 2);
                 }
             )+
         }
@@ -149,7 +149,7 @@ pub fn unrolled_find_u16s(needle: u16, haystack: &[u16]) -> Option<usize> {

     for c in start {
         if *c == needle {
-            return Some((c as *const u16 as usize - ptr as usize) / 2);
+            return Some(((c as *const u16).addr() - ptr.addr()) / 2);
         }
     }
     None
@@ -134,7 +134,7 @@ impl Drop for Env {
 pub fn env() -> Env {
     unsafe {
         let ch = c::GetEnvironmentStringsW();
-        if ch as usize == 0 {
+        if ch.is_null() {
             panic!("failure getting env string from OS: {}", io::Error::last_os_error());
         }
         Env { base: ch, cur: ch }
@@ -60,7 +60,7 @@
 use crate::convert::TryFrom;
 use crate::ptr;
 use crate::sync::atomic::{
-    AtomicI8, AtomicUsize,
+    AtomicI8, AtomicPtr,
     Ordering::{Acquire, Relaxed, Release},
 };
 use crate::sys::{c, dur2timeout};
@@ -217,8 +217,8 @@ impl Parker {
 }

 fn keyed_event_handle() -> c::HANDLE {
-    const INVALID: usize = !0;
-    static HANDLE: AtomicUsize = AtomicUsize::new(INVALID);
+    const INVALID: c::HANDLE = ptr::invalid_mut(!0);
+    static HANDLE: AtomicPtr<libc::c_void> = AtomicPtr::new(INVALID);
     match HANDLE.load(Relaxed) {
         INVALID => {
             let mut handle = c::INVALID_HANDLE_VALUE;
@@ -233,7 +233,7 @@ fn keyed_event_handle() -> c::HANDLE {
                     r => panic!("Unable to create keyed event handle: error {r}"),
                 }
             }
-            match HANDLE.compare_exchange(INVALID, handle as usize, Relaxed, Relaxed) {
+            match HANDLE.compare_exchange(INVALID, handle, Relaxed, Relaxed) {
                 Ok(_) => handle,
                 Err(h) => {
                     // Lost the race to another thread initializing HANDLE before we did.
@@ -241,10 +241,10 @@ fn keyed_event_handle() -> c::HANDLE {
                     unsafe {
                         c::CloseHandle(handle);
                     }
-                    h as c::HANDLE
+                    h
                 }
             }
         }
-        handle => handle as c::HANDLE,
+        handle => handle,
     }
 }
@@ -1,4 +1,5 @@
-use crate::sync::atomic::{AtomicUsize, Ordering};
+use crate::ptr;
+use crate::sync::atomic::{AtomicPtr, Ordering};
 use crate::sys::locks as imp;
 use crate::sys_common::mutex::MovableMutex;

@@ -13,17 +14,18 @@ impl CondvarCheck for Box<imp::Mutex> {
 }

 pub struct SameMutexCheck {
-    addr: AtomicUsize,
+    addr: AtomicPtr<()>,
 }

 #[allow(dead_code)]
 impl SameMutexCheck {
     pub const fn new() -> Self {
-        Self { addr: AtomicUsize::new(0) }
+        Self { addr: AtomicPtr::new(ptr::null_mut()) }
     }
     pub fn verify(&self, mutex: &MovableMutex) {
-        let addr = mutex.raw() as *const imp::Mutex as usize;
-        match self.addr.compare_exchange(0, addr, Ordering::SeqCst, Ordering::SeqCst) {
+        let addr = mutex.raw() as *const imp::Mutex as *const () as *mut _;
+        match self.addr.compare_exchange(ptr::null_mut(), addr, Ordering::SeqCst, Ordering::SeqCst)
+        {
             Ok(_) => {}               // Stored the address
             Err(n) if n == addr => {} // Lost a race to store the same address
             _ => panic!("attempted to use a condition variable with two mutexes"),
@@ -1071,7 +1071,7 @@ pub mod os {
         pub unsafe fn get(&'static self, init: impl FnOnce() -> T) -> Option<&'static T> {
             // SAFETY: See the documentation for this method.
             let ptr = unsafe { self.os.get() as *mut Value<T> };
-            if ptr as usize > 1 {
+            if ptr.addr() > 1 {
                 // SAFETY: the check ensured the pointer is safe (its destructor
                 // is not running) + it is coming from a trusted source (self).
                 if let Some(ref value) = unsafe { (*ptr).inner.get() } {
@@ -1090,7 +1090,7 @@ pub mod os {
             // SAFETY: No mutable references are ever handed out meaning getting
             // the value is ok.
             let ptr = unsafe { self.os.get() as *mut Value<T> };
-            if ptr as usize == 1 {
+            if ptr.addr() == 1 {
                 // destructor is running
                 return None;
             }
@@ -1130,7 +1130,7 @@ pub mod os {
             unsafe {
                 let ptr = Box::from_raw(ptr as *mut Value<T>);
                 let key = ptr.key;
-                key.os.set(1 as *mut u8);
+                key.os.set(ptr::invalid_mut(1));
                 drop(ptr);
                 key.os.set(ptr::null_mut());
            }
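Editor's note: the OS thread-local above packs three cases into one pointer-typed slot: null (no value), address 1 (destructor currently running), and a real `Box` pointer (address greater than 1); the sentinel is now spelled `ptr::invalid_mut(1)` instead of `1 as *mut u8`. A sketch of the discrimination logic, not from the commit (`Slot` and `classify` are made-up names):

    enum Slot {
        Empty,
        DestructorRunning,
        Live(*mut u8),
    }

    fn classify(ptr: *mut u8) -> Slot {
        // `addr()` is enough here: the sentinels are pure addresses, and the
        // only pointer ever dereferenced is the one in `Live`.
        match ptr.addr() {
            0 => Slot::Empty,
            1 => Slot::DestructorRunning,
            _ => Slot::Live(ptr),
        }
    }

    fn main() {
        assert!(matches!(classify(std::ptr::null_mut()), Slot::Empty));
        assert!(matches!(classify(1 as *mut u8), Slot::DestructorRunning));
        let mut x = 5u8;
        assert!(matches!(classify(&mut x), Slot::Live(_)));
    }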