Rollup merge of #41064 - Gankro:ptr-redux, r=alexcrichton
refactor NonZero, Shared, and Unique APIs

The major difference is that the `Deref` impls are removed, as LLVM apparently has trouble maintaining metadata through a `&ptr -> &ptr` API. This was cited as a blocker for ever stabilizing these types, and the impls weren't that ergonomic anyway.

* Added `get` to `NonZero` to replace the `Deref` impl
* Added an `as_ptr` getter to `Shared`/`Unique` to replace the `Deref` impls
* Added `Unique`'s `as_ref`/`as_mut` conveniences to `Shared`
* Deprecated `as_mut_ptr` on `Shared` in favour of `as_ptr`

Note that `Shared` used to primarily expose only `*const T`, but there isn't a good justification for that, so it now exposes `*mut T`.
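Concretely, downstream code that used to go through `Deref` now calls the accessors. A minimal before/after sketch using the names introduced in this diff (feature gates as of this commit; not guaranteed to compile on other toolchains):

```rust
#![feature(unique, shared)]

use std::ptr::{Shared, Unique};

// NonZero is analogous: `nz.get()` replaces the old `*nz`.
unsafe fn migrate(mut u: Unique<u32>, s: Shared<u32>) {
    let raw: *mut u32 = u.as_ptr(); // was `*u` through Deref
    *u.as_mut() += 1;               // was `**u += 1` through `&mut **u`
    let read = *s.as_ref();         // was `**s`
    let _ = (raw, read);
}
```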
This commit is contained in commit 6ace8a76cb.

22 changed files with 278 additions and 197 deletions
@@ -1 +1 @@
-Subproject commit 616b98444ff4eb5260deee95ee3e090dfd98b947
+Subproject commit 6fa139b1630a9bb95dcd60cfc90aff9c19e54580
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);

         unsafe {
-            let ptr = *this.ptr;
-            let elem = ptr::read(&(*ptr).data);
+            let elem = ptr::read(&this.ptr.as_ref().data);

             // Make a weak pointer to clean up the implicit strong-weak reference
             let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &(**this.ptr).data as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
         // `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: Shared::new(ptr as *const _),
+            ptr: Shared::new(ptr as *mut u8 as *mut _),
         }
     }
 }
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }

     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_mut_ptr();
+        let ptr = self.ptr.as_ptr();

         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut (*ptr).data);
+        ptr::drop_in_place(&mut self.ptr.as_mut().data);

         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const ArcInner<T> = *this.ptr;
-        let other_ptr: *const ArcInner<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }

@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
             // here (due to zeroing) because data is no longer accessed by
             // other threads (due to there being no more strong refs at this
             // point).
-            let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+            let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
             mem::swap(this, &mut swap);
             mem::forget(swap);
         }
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut *this.ptr.as_mut_ptr();
-            &mut inner.data
+            &mut this.ptr.as_mut().data
         }
     }
 }
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
             unsafe {
-                let inner = &mut *this.ptr.as_mut_ptr();
-                Some(&mut inner.data)
+                Some(&mut this.ptr.as_mut().data)
             }
         } else {
             None
@@ -878,7 +873,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 }

@@ -962,7 +957,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.as_ptr();

         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1143,7 +1138,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
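The `ptr_eq` body above shrinks to a single pointer comparison; its observable behavior is unchanged. A quick usage example on the stable `Arc` API (independent of this diff):

```rust
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let same = five.clone();
    let other = Arc::new(5);
    assert!(Arc::ptr_eq(&five, &same));   // same allocation
    assert!(!Arc::ptr_eq(&five, &other)); // equal values, different allocations
}
```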
@@ -156,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
     let align = mem::align_of::<T>();

     let p = if size == 0 {
-        heap::EMPTY as *mut u8
+        mem::align_of::<T>() as *mut u8
     } else {
         let p = unsafe { heap::allocate(size, align) };
         if p.is_null() {
@@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
 ///
 /// This preserves the non-null invariant for types like `Box<T>`. The address
 /// may overlap with non-zero-size memory allocations.
-pub const EMPTY: *mut () = 0x1 as *mut ();
+#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
+#[unstable(feature = "heap_api", issue = "27700")]
+pub const EMPTY: *mut () = 1 as *mut ();

 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR trans will fail.
@@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     if size == 0 {
-        EMPTY as *mut u8
+        align as *mut u8
     } else {
         let ptr = allocate(size, align);
         if ptr.is_null() {
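The deprecation of `heap::EMPTY` works because, for a zero-sized request, any non-null, well-aligned address is an acceptable pointer, and `align_of::<T>()` is both by definition. A standalone sketch of the invariant (the helper name is hypothetical):

```rust
use std::mem;

// Hypothetical stand-in for Unique::empty()/Shared::empty(): a dangling but
// non-null, well-aligned pointer that must never be dereferenced for sized T.
fn dangling<T>() -> *mut T {
    mem::align_of::<T>() as *mut T
}

fn main() {
    let p = dangling::<u64>();
    assert!(!p.is_null());
    assert_eq!(p as usize % mem::align_of::<u64>(), 0);
}
```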
@@ -22,13 +22,13 @@ use core::cmp;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
-/// * Produces heap::EMPTY on zero-sized types
-/// * Produces heap::EMPTY on zero-length allocations
+/// * Produces Unique::empty() on zero-sized types
+/// * Produces Unique::empty() on zero-length allocations
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes
 /// * Guards against overflowing your length
 /// * Aborts on OOM
-/// * Avoids freeing heap::EMPTY
+/// * Avoids freeing Unique::empty()
 /// * Contains a ptr::Unique and thus endows the user with all related benefits
 ///
 /// This type does not in anyway inspect the memory that it manages. When dropped it *will*
@@ -55,17 +55,15 @@ impl<T> RawVec<T> {
     /// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        unsafe {
-            // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+        // !0 is usize::MAX. This branch should be stripped at compile time.
+        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };

-            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
-            RawVec {
-                ptr: Unique::new(heap::EMPTY as *mut T),
-                cap: cap,
-            }
+        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+        RawVec {
+            ptr: Unique::empty(),
+            cap: cap,
         }
     }

     /// Creates a RawVec with exactly the capacity and alignment requirements
     /// for a `[T; cap]`. This is equivalent to calling RawVec::new when `cap` is 0
@@ -101,7 +99,7 @@ impl<T> RawVec<T> {

         // handles ZSTs and `cap = 0` alike
         let ptr = if alloc_size == 0 {
-            heap::EMPTY as *mut u8
+            mem::align_of::<T>() as *mut u8
         } else {
             let align = mem::align_of::<T>();
             let ptr = if zeroed {
@@ -148,10 +146,10 @@ impl<T> RawVec<T> {

 impl<T> RawVec<T> {
     /// Gets a raw pointer to the start of the allocation. Note that this is
-    /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
+    /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
     /// be careful.
     pub fn ptr(&self) -> *mut T {
-        *self.ptr
+        self.ptr.as_ptr()
     }

     /// Gets the capacity of the allocation.
@@ -563,7 +561,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {

         let num_bytes = elem_size * self.cap;
         unsafe {
-            heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+            heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
         }
     }
 }
@@ -230,7 +230,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::intrinsics::{abort, assume};
+use core::intrinsics::abort;
 use core::marker;
 use core::marker::Unsize;
 use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -395,7 +395,11 @@ impl<T> Rc<T> {
     pub unsafe fn from_raw(ptr: *const T) -> Self {
         // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
         // `value` field from the pointer.
-        Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
+
+        let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
+        Rc {
+            ptr: Shared::new(ptr as *mut u8 as *mut _)
+        }
     }
 }

@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
             // Free the original allocation without freeing its (moved) contents.
             box_free(Box::into_raw(value));

-            Rc { ptr: Shared::new(ptr as *const _) }
+            Rc { ptr: Shared::new(ptr as *mut _) }
         }
     }
 }
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         if Rc::is_unique(this) {
-            let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-            Some(&mut inner.value)
+            unsafe {
+                Some(&mut this.ptr.as_mut().value)
+            }
         } else {
             None
         }
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const RcBox<T> = *this.ptr;
-        let other_ptr: *const RcBox<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }

@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
             unsafe {
-                let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
+                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
                 mem::swap(this, &mut swap);
                 swap.dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-        &mut inner.value
+        unsafe {
+            &mut this.ptr.as_mut().value
+        }
     }
 }

@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_mut_ptr();
+            let ptr = self.ptr.as_ptr();

             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
-                ptr::drop_in_place(&mut (*ptr).value);
+                ptr::drop_in_place(self.ptr.as_mut());

                 // remove the implicit "strong weak" pointer now that we've
                 // destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }

@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self.ptr;
+            let ptr = self.ptr.as_ptr();

             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
        }
    }
 }
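`into_raw` above now produces the data pointer simply via `&*this`, and `from_raw` recovers the `RcBox` by subtracting the field offset; together they round-trip. A usage example on the stable API (independent of this diff):

```rust
use std::rc::Rc;

fn main() {
    let rc = Rc::new(5i32);
    let raw: *const i32 = Rc::into_raw(rc);  // points at the data, not the RcBox
    let back = unsafe { Rc::from_raw(raw) }; // recomputes the RcBox pointer
    assert_eq!(*back, 5);
}
```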
@@ -31,7 +31,6 @@
 #![feature(alloc)]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(heap_api)]
 #![feature(generic_param_attrs)]
 #![feature(staged_api)]
 #![cfg_attr(test, feature(test))]
@@ -48,7 +47,6 @@ use std::mem;
 use std::ptr;
 use std::slice;

-use alloc::heap;
 use alloc::raw_vec::RawVec;

 /// An arena that can hold objects of only one type.
@@ -140,7 +138,7 @@ impl<T> TypedArena<T> {
         unsafe {
             if mem::size_of::<T>() == 0 {
                 self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
-                let ptr = heap::EMPTY as *mut T;
+                let ptr = mem::align_of::<T>() as *mut T;
                 // Don't drop the object. This `write` is equivalent to `forget`.
                 ptr::write(ptr, object);
                 &mut *ptr
@@ -152,12 +152,12 @@ impl<K, V> BoxedNode<K, V> {
     }

     unsafe fn from_ptr(ptr: NonZero<*const LeafNode<K, V>>) -> Self {
-        BoxedNode { ptr: Unique::new(*ptr as *mut LeafNode<K, V>) }
+        BoxedNode { ptr: Unique::new(ptr.get() as *mut LeafNode<K, V>) }
     }

     fn as_ptr(&self) -> NonZero<*const LeafNode<K, V>> {
         unsafe {
-            NonZero::new(*self.ptr as *const LeafNode<K, V>)
+            NonZero::new(self.ptr.as_ptr())
         }
     }
 }
@@ -241,7 +241,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);

-        let top = *self.node.ptr as *mut u8;
+        let top = self.node.ptr.as_ptr() as *mut u8;

         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -308,7 +308,7 @@ unsafe impl<K: Send, V: Send, Type> Send
 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
     fn as_internal(&self) -> &InternalNode<K, V> {
         unsafe {
-            &*(*self.node as *const InternalNode<K, V>)
+            &*(self.node.get() as *const InternalNode<K, V>)
         }
     }
 }
@@ -316,7 +316,7 @@ impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
     fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
         unsafe {
-            &mut *(*self.node as *mut InternalNode<K, V>)
+            &mut *(self.node.get() as *mut InternalNode<K, V>)
         }
     }
 }
@@ -358,7 +358,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {

     fn as_leaf(&self) -> &LeafNode<K, V> {
         unsafe {
-            &**self.node
+            &*self.node.get()
         }
     }

@@ -510,7 +510,7 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {

     fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
         unsafe {
-            &mut *(*self.node as *mut LeafNode<K, V>)
+            &mut *(self.node.get() as *mut LeafNode<K, V>)
         }
     }

@@ -1253,13 +1253,13 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }

             heap::deallocate(
-                *right_node.node as *mut u8,
+                right_node.node.get() as *mut u8,
                 mem::size_of::<InternalNode<K, V>>(),
                 mem::align_of::<InternalNode<K, V>>()
             );
         } else {
             heap::deallocate(
-                *right_node.node as *mut u8,
+                right_node.node.get() as *mut u8,
                 mem::size_of::<LeafNode<K, V>>(),
                 mem::align_of::<LeafNode<K, V>>()
             );
@@ -161,7 +161,7 @@ impl<T> LinkedList<T> {

             match self.head {
                 None => self.tail = node,
-                Some(head) => (*head.as_mut_ptr()).prev = node,
+                Some(mut head) => head.as_mut().prev = node,
             }

             self.head = node;
@@ -173,12 +173,12 @@ impl<T> LinkedList<T> {
     #[inline]
     fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
         self.head.map(|node| unsafe {
-            let node = Box::from_raw(node.as_mut_ptr());
+            let node = Box::from_raw(node.as_ptr());
             self.head = node.next;

             match self.head {
                 None => self.tail = None,
-                Some(head) => (*head.as_mut_ptr()).prev = None,
+                Some(mut head) => head.as_mut().prev = None,
             }

             self.len -= 1;
@@ -196,7 +196,7 @@ impl<T> LinkedList<T> {

             match self.tail {
                 None => self.head = node,
-                Some(tail) => (*tail.as_mut_ptr()).next = node,
+                Some(mut tail) => tail.as_mut().next = node,
             }

             self.tail = node;
@@ -208,12 +208,12 @@ impl<T> LinkedList<T> {
     #[inline]
     fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
         self.tail.map(|node| unsafe {
-            let node = Box::from_raw(node.as_mut_ptr());
+            let node = Box::from_raw(node.as_ptr());
             self.tail = node.prev;

             match self.tail {
                 None => self.head = None,
-                Some(tail) => (*tail.as_mut_ptr()).next = None,
+                Some(mut tail) => tail.as_mut().next = None,
             }

             self.len -= 1;
@@ -285,11 +285,11 @@ impl<T> LinkedList<T> {
     pub fn append(&mut self, other: &mut Self) {
         match self.tail {
             None => mem::swap(self, other),
-            Some(tail) => {
-                if let Some(other_head) = other.head.take() {
+            Some(mut tail) => {
+                if let Some(mut other_head) = other.head.take() {
                     unsafe {
-                        (*tail.as_mut_ptr()).next = Some(other_head);
-                        (*other_head.as_mut_ptr()).prev = Some(tail);
+                        tail.as_mut().next = Some(other_head);
+                        other_head.as_mut().prev = Some(tail);
                     }

                     self.tail = other.tail.take();
@@ -477,7 +477,9 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn front(&self) -> Option<&T> {
-        self.head.map(|node| unsafe { &(**node).element })
+        unsafe {
+            self.head.as_ref().map(|node| &node.as_ref().element)
+        }
     }

     /// Provides a mutable reference to the front element, or `None` if the list
@@ -503,7 +505,9 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn front_mut(&mut self) -> Option<&mut T> {
-        self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
+        unsafe {
+            self.head.as_mut().map(|node| &mut node.as_mut().element)
+        }
     }

     /// Provides a reference to the back element, or `None` if the list is
@@ -523,7 +527,9 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn back(&self) -> Option<&T> {
-        self.tail.map(|node| unsafe { &(**node).element })
+        unsafe {
+            self.tail.as_ref().map(|node| &node.as_ref().element)
+        }
     }

     /// Provides a mutable reference to the back element, or `None` if the list
@@ -549,7 +555,9 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn back_mut(&mut self) -> Option<&mut T> {
-        self.tail.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
+        unsafe {
+            self.tail.as_mut().map(|node| &mut node.as_mut().element)
+        }
     }

     /// Adds an element first in the list.
@@ -694,9 +702,9 @@ impl<T> LinkedList<T> {
         let second_part_head;

         unsafe {
-            second_part_head = (*split_node.unwrap().as_mut_ptr()).next.take();
-            if let Some(head) = second_part_head {
-                (*head.as_mut_ptr()).prev = None;
+            second_part_head = split_node.unwrap().as_mut().next.take();
+            if let Some(mut head) = second_part_head {
+                head.as_mut().prev = None;
             }
         }

@@ -788,7 +796,8 @@ impl<'a, T> Iterator for Iter<'a, T> {
             None
         } else {
             self.head.map(|node| unsafe {
-                let node = &**node;
+                // Need an unbound lifetime to get 'a
+                let node = &*node.as_ptr();
                 self.len -= 1;
                 self.head = node.next;
                 &node.element
@@ -810,7 +819,8 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
             None
         } else {
             self.tail.map(|node| unsafe {
-                let node = &**node;
+                // Need an unbound lifetime to get 'a
+                let node = &*node.as_ptr();
                 self.len -= 1;
                 self.tail = node.prev;
                 &node.element
@@ -835,7 +845,8 @@ impl<'a, T> Iterator for IterMut<'a, T> {
             None
         } else {
             self.head.map(|node| unsafe {
-                let node = &mut *node.as_mut_ptr();
+                // Need an unbound lifetime to get 'a
+                let node = &mut *node.as_ptr();
                 self.len -= 1;
                 self.head = node.next;
                 &mut node.element
@@ -857,7 +868,8 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
             None
         } else {
             self.tail.map(|node| unsafe {
-                let node = &mut *node.as_mut_ptr();
+                // Need an unbound lifetime to get 'a
+                let node = &mut *node.as_ptr();
                 self.len -= 1;
                 self.tail = node.prev;
                 &mut node.element
@@ -903,8 +915,8 @@ impl<'a, T> IterMut<'a, T> {
     pub fn insert_next(&mut self, element: T) {
         match self.head {
             None => self.list.push_back(element),
-            Some(head) => unsafe {
-                let prev = match (**head).prev {
+            Some(mut head) => unsafe {
+                let mut prev = match head.as_ref().prev {
                     None => return self.list.push_front(element),
                     Some(prev) => prev,
                 };
@@ -915,8 +927,8 @@ impl<'a, T> IterMut<'a, T> {
                     element: element,
                 })));

-                (*prev.as_mut_ptr()).next = node;
-                (*head.as_mut_ptr()).prev = node;
+                prev.as_mut().next = node;
+                head.as_mut().prev = node;

                 self.list.len += 1;
             },
@@ -948,7 +960,9 @@ impl<'a, T> IterMut<'a, T> {
         if self.len == 0 {
             None
         } else {
-            self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
+            unsafe {
+                self.head.as_mut().map(|node| &mut node.as_mut().element)
+            }
         }
     }
 }
@@ -1276,21 +1290,21 @@ mod tests {
                 assert_eq!(0, list.len);
                 return;
             }
-            Some(node) => node_ptr = &**node,
+            Some(node) => node_ptr = &*node.as_ptr(),
         }
         loop {
             match (last_ptr, node_ptr.prev) {
                 (None, None) => {}
                 (None, _) => panic!("prev link for head"),
                 (Some(p), Some(pptr)) => {
-                    assert_eq!(p as *const Node<T>, *pptr as *const Node<T>);
+                    assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
                 }
                 _ => panic!("prev link is none, not good"),
             }
             match node_ptr.next {
                 Some(next) => {
                     last_ptr = Some(node_ptr);
-                    node_ptr = &**next;
+                    node_ptr = &*next.as_ptr();
                     len += 1;
                 }
                 None => {
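A recurring pattern in the hunks above: `Shared` is `Copy`, and `as_mut` takes `&mut self`, so the match must bind the copied pointer mutably (`Some(mut head)`) before relinking through it. A reduced sketch of that shape (assumes the `shared` feature as of this diff):

```rust
#![feature(shared)]

use std::ptr::Shared;

struct Node {
    prev: Option<Shared<Node>>,
    element: i32,
}

// Binding `mut head` mutates through the local copy of the pointer; the
// Option stored in the list itself is untouched, which is exactly what the
// relinking code wants.
unsafe fn set_prev(head: Option<Shared<Node>>, prev: Option<Shared<Node>>) {
    match head {
        None => {}
        Some(mut head) => head.as_mut().prev = prev,
    }
}
```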
@@ -67,7 +67,6 @@
 #![stable(feature = "rust1", since = "1.0.0")]

 use alloc::boxed::Box;
-use alloc::heap::EMPTY;
 use alloc::raw_vec::RawVec;
 use borrow::ToOwned;
 use borrow::Cow;
@@ -1776,9 +1775,9 @@ impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
         // A common case is passing a vector into a function which immediately
         // re-collects into a vector. We can short circuit this if the IntoIter
         // has not been advanced at all.
-        if *iterator.buf == iterator.ptr as *mut T {
+        if iterator.buf.as_ptr() as *const _ == iterator.ptr {
             unsafe {
-                let vec = Vec::from_raw_parts(*iterator.buf as *mut T,
+                let vec = Vec::from_raw_parts(iterator.buf.as_ptr(),
                                               iterator.len(),
                                               iterator.cap);
                 mem::forget(iterator);
@@ -2192,7 +2191,8 @@ impl<T> Iterator for IntoIter<T> {
                 self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;

                 // Use a non-null pointer value
-                Some(ptr::read(EMPTY as *mut T))
+                // (self.ptr might be null because of wrapping)
+                Some(ptr::read(1 as *mut T))
             } else {
                 let old = self.ptr;
                 self.ptr = self.ptr.offset(1);
@@ -2231,7 +2231,8 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
                 self.end = arith_offset(self.end as *const i8, -1) as *mut T;

                 // Use a non-null pointer value
-                Some(ptr::read(EMPTY as *mut T))
+                // (self.end might be null because of wrapping)
+                Some(ptr::read(1 as *mut T))
             } else {
                 self.end = self.end.offset(-1);

@@ -2269,7 +2270,7 @@ unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
         for _x in self.by_ref() {}

         // RawVec handles deallocation
-        let _ = unsafe { RawVec::from_raw_parts(self.buf.as_mut_ptr(), self.cap) };
+        let _ = unsafe { RawVec::from_raw_parts(self.buf.as_ptr(), self.cap) };
     }
 }

@@ -2334,7 +2335,7 @@ impl<'a, T> Drop for Drain<'a, T> {

         if self.tail_len > 0 {
             unsafe {
-                let source_vec = &mut *self.vec.as_mut_ptr();
+                let source_vec = self.vec.as_mut();
                 // memmove back untouched tail, update to new length
                 let start = source_vec.len();
                 let tail = self.tail_start;
@@ -2456,8 +2457,7 @@ impl<'a, I: Iterator> Drop for Splice<'a, I> {

         unsafe {
             if self.drain.tail_len == 0 {
-                let vec = &mut *self.drain.vec.as_mut_ptr();
-                vec.extend(self.replace_with.by_ref());
+                self.drain.vec.as_mut().extend(self.replace_with.by_ref());
                 return
             }

@@ -2498,7 +2498,7 @@ impl<'a, T> Drain<'a, T> {
     /// Fill that range as much as possible with new elements from the `replace_with` iterator.
     /// Return whether we filled the entire range. (`replace_with.next()` didn’t return `None`.)
     unsafe fn fill<I: Iterator<Item=T>>(&mut self, replace_with: &mut I) -> bool {
-        let vec = &mut *self.vec.as_mut_ptr();
+        let vec = self.vec.as_mut();
         let range_start = vec.len;
         let range_end = self.tail_start;
         let range_slice = slice::from_raw_parts_mut(
@@ -2518,7 +2518,7 @@ impl<'a, T> Drain<'a, T> {

     /// Make room for inserting more elements before the tail.
     unsafe fn move_tail(&mut self, extra_capacity: usize) {
-        let vec = &mut *self.vec.as_mut_ptr();
+        let vec = self.vec.as_mut();
         let used_capacity = self.tail_start + self.tail_len;
         vec.buf.reserve(used_capacity, extra_capacity);

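The `ptr::read(1 as *mut T)` in the iterator above is only reached when `T` is zero-sized (the ZST branch advances the pointer by plain address arithmetic), and reading a zero-sized value touches no memory. A standalone illustration of that property:

```rust
use std::ptr;

#[derive(Debug, PartialEq)]
struct Zst;

fn main() {
    // A zero-sized read needs no real allocation; address 1 is non-null and
    // trivially aligned for a ZST, matching the iterator's trick above.
    let z: Zst = unsafe { ptr::read(1 as *mut Zst) };
    assert_eq!(z, Zst);
}
```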
@@ -2160,7 +2160,7 @@ impl<'a, T: 'a> Drop for Drain<'a, T> {
     fn drop(&mut self) {
         for _ in self.by_ref() {}

-        let source_deque = unsafe { &mut *self.deque.as_mut_ptr() };
+        let source_deque = unsafe { self.deque.as_mut() };

         // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
         //
@@ -132,7 +132,6 @@
 //! use std::cell::Cell;
 //! use std::ptr::Shared;
 //! use std::intrinsics::abort;
-//! use std::intrinsics::assume;
 //!
 //! struct Rc<T: ?Sized> {
 //!     ptr: Shared<RcBox<T>>
@@ -171,8 +170,7 @@
 //! impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
 //!     fn inner(&self) -> &RcBox<T> {
 //!         unsafe {
-//!             assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-//!             &(**self.ptr)
+//!             self.ptr.as_ref()
 //!         }
 //!     }
 //! }
@@ -13,7 +13,7 @@
            reason = "needs an RFC to flesh out the design",
            issue = "27730")]

-use ops::{CoerceUnsized, Deref};
+use ops::CoerceUnsized;

 /// Unsafe trait to indicate what types are usable with the NonZero struct
 pub unsafe trait Zeroable {}
@@ -46,15 +46,10 @@ impl<T: Zeroable> NonZero<T> {
     pub const unsafe fn new(inner: T) -> NonZero<T> {
         NonZero(inner)
     }
-}
-
-impl<T: Zeroable> Deref for NonZero<T> {
-    type Target = T;

-    #[inline]
-    fn deref(&self) -> &T {
-        let NonZero(ref inner) = *self;
-        inner
+    /// Gets the inner value.
+    pub fn get(self) -> T {
+        self.0
     }
 }

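The non-zero invariant that `NonZero` encodes is what lets `Option` of such a wrapper stay pointer-sized, a point the `Unique`/`Shared` docs below reiterate. Observable on stable Rust via types that carry the same invariant:

```rust
use std::mem::size_of;

fn main() {
    // 0 is a forbidden value, so the compiler uses it as the None tag.
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    assert_eq!(size_of::<Option<Box<u8>>>(), size_of::<Box<u8>>());
}
```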
@@ -17,7 +17,7 @@
 #![stable(feature = "rust1", since = "1.0.0")]

 use intrinsics;
-use ops::{CoerceUnsized, Deref};
+use ops::CoerceUnsized;
 use fmt;
 use hash;
 use marker::{PhantomData, Unsize};
@@ -957,13 +957,25 @@ impl<T: ?Sized> PartialOrd for *mut T {
 }

 /// A wrapper around a raw non-null `*mut T` that indicates that the possessor
-/// of this wrapper owns the referent. This in turn implies that the
-/// `Unique<T>` is `Send`/`Sync` if `T` is `Send`/`Sync`, unlike a raw
-/// `*mut T` (which conveys no particular ownership semantics). It
-/// also implies that the referent of the pointer should not be
-/// modified without a unique path to the `Unique` reference. Useful
-/// for building abstractions like `Vec<T>` or `Box<T>`, which
-/// internally use raw pointers to manage the memory that they own.
+/// of this wrapper owns the referent. Useful for building abstractions like
+/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
+///
+/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
+/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
+/// the kind of strong aliasing guarantees an instance of `T` can expect:
+/// the referent of the pointer should not be modified without a unique path to
+/// its owning Unique.
+///
+/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
+/// consider using `Shared`, which has weaker semantics.
+///
+/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
+/// is never dereferenced. This is so that enums may use this forbidden value
+/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
+/// However the pointer may still dangle if it isn't dereferenced.
+///
+/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
+/// for any type which upholds Unique's aliasing requirements.
 #[allow(missing_debug_implementations)]
 #[unstable(feature = "unique", reason = "needs an RFC to flesh out design",
            issue = "27730")]
@@ -991,6 +1003,20 @@ unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
 #[unstable(feature = "unique", issue = "27730")]
 unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }

+#[unstable(feature = "unique", issue = "27730")]
+impl<T: Sized> Unique<T> {
+    /// Creates a new `Shared` that is dangling, but well-aligned.
+    ///
+    /// This is useful for initializing types which lazily allocate, like
+    /// `Vec::new` does.
+    pub fn empty() -> Self {
+        unsafe {
+            let ptr = mem::align_of::<T>() as *mut T;
+            Unique::new(ptr)
+        }
+    }
+}
+
 #[unstable(feature = "unique", issue = "27730")]
 impl<T: ?Sized> Unique<T> {
     /// Creates a new `Unique`.
@@ -1002,41 +1028,72 @@ impl<T: ?Sized> Unique<T> {
         Unique { pointer: NonZero::new(ptr), _marker: PhantomData }
     }

+    /// Acquires the underlying `*mut` pointer.
+    pub fn as_ptr(self) -> *mut T {
+        self.pointer.get() as *mut T
+    }
+
     /// Dereferences the content.
-    pub unsafe fn get(&self) -> &T {
-        &**self.pointer
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a longer
+    /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
+    pub unsafe fn as_ref(&self) -> &T {
+        &*self.as_ptr()
     }

     /// Mutably dereferences the content.
-    pub unsafe fn get_mut(&mut self) -> &mut T {
-        &mut ***self
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a longer
+    /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr()`.
+    pub unsafe fn as_mut(&mut self) -> &mut T {
+        &mut *self.as_ptr()
     }
 }

+#[unstable(feature = "shared", issue = "27730")]
+impl<T: ?Sized> Clone for Unique<T> {
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+#[unstable(feature = "shared", issue = "27730")]
+impl<T: ?Sized> Copy for Unique<T> { }
+
 #[unstable(feature = "unique", issue = "27730")]
 impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> { }

-#[unstable(feature = "unique", issue= "27730")]
-impl<T:?Sized> Deref for Unique<T> {
-    type Target = *mut T;
-
-    #[inline]
-    fn deref(&self) -> &*mut T {
-        unsafe { mem::transmute(&*self.pointer) }
-    }
-}
-
 #[unstable(feature = "unique", issue = "27730")]
-impl<T> fmt::Pointer for Unique<T> {
+impl<T: ?Sized> fmt::Pointer for Unique<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.pointer, f)
+        fmt::Pointer::fmt(&self.as_ptr(), f)
     }
 }

-/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
+/// A wrapper around a raw `*mut T` that indicates that the possessor
 /// of this wrapper has shared ownership of the referent. Useful for
-/// building abstractions like `Rc<T>` or `Arc<T>`, which internally
-/// use raw pointers to manage the memory that they own.
+/// building abstractions like `Rc<T>`, `Arc<T>`, or doubly-linked lists, which
+/// internally use aliased raw pointers to manage the memory that they own.
+///
+/// This is similar to `Unique`, except that it doesn't make any aliasing
+/// guarantees, and doesn't derive Send and Sync. Note that unlike `&T`,
+/// Shared has no special mutability requirements. Shared may mutate data
+/// aliased by other Shared pointers. More precise rules require Rust to
+/// develop an actual aliasing model.
+///
+/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
+/// is never dereferenced. This is so that enums may use this forbidden value
+/// as a discriminant -- `Option<Shared<T>>` has the same size as `Shared<T>`.
+/// However the pointer may still dangle if it isn't dereferenced.
+///
+/// Unlike `*mut T`, `Shared<T>` is covariant over `T`. If this is incorrect
+/// for your use case, you should include some PhantomData in your type to
+/// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
+/// Usually this won't be necessary; covariance is correct for Rc, Arc, and LinkedList
+/// because they provide a public API that follows the normal shared XOR mutable
+/// rules of Rust.
 #[allow(missing_debug_implementations)]
 #[unstable(feature = "shared", reason = "needs an RFC to flesh out design",
            issue = "27730")]
@@ -1060,6 +1117,20 @@ impl<T: ?Sized> !Send for Shared<T> { }
 #[unstable(feature = "shared", issue = "27730")]
 impl<T: ?Sized> !Sync for Shared<T> { }

+#[unstable(feature = "shared", issue = "27730")]
+impl<T: Sized> Shared<T> {
+    /// Creates a new `Shared` that is dangling, but well-aligned.
+    ///
+    /// This is useful for initializing types which lazily allocate, like
+    /// `Vec::new` does.
+    pub fn empty() -> Self {
+        unsafe {
+            let ptr = mem::align_of::<T>() as *mut T;
+            Shared::new(ptr)
+        }
+    }
+}
+
 #[unstable(feature = "shared", issue = "27730")]
 impl<T: ?Sized> Shared<T> {
     /// Creates a new `Shared`.
@@ -1067,16 +1138,38 @@ impl<T: ?Sized> Shared<T> {
     /// # Safety
     ///
     /// `ptr` must be non-null.
-    pub unsafe fn new(ptr: *const T) -> Self {
+    pub unsafe fn new(ptr: *mut T) -> Self {
         Shared { pointer: NonZero::new(ptr), _marker: PhantomData }
     }
 }

 #[unstable(feature = "shared", issue = "27730")]
 impl<T: ?Sized> Shared<T> {
+    /// Acquires the underlying `*mut` pointer.
+    pub fn as_ptr(self) -> *mut T {
+        self.pointer.get() as *mut T
+    }
+
+    /// Dereferences the content.
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a longer
+    /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
+    pub unsafe fn as_ref(&self) -> &T {
+        &*self.as_ptr()
+    }
+
+    /// Mutably dereferences the content.
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a longer
+    /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr_mut()`.
+    pub unsafe fn as_mut(&mut self) -> &mut T {
+        &mut *self.as_ptr()
+    }
+
     /// Acquires the underlying pointer as a `*mut` pointer.
+    #[rustc_deprecated(since = "1.19", reason = "renamed to `as_ptr` for ergonomics/consistency")]
+    #[unstable(feature = "shared", issue = "27730")]
     pub unsafe fn as_mut_ptr(&self) -> *mut T {
-        **self as _
+        self.as_ptr()
     }
 }

@@ -1094,18 +1187,8 @@ impl<T: ?Sized> Copy for Shared<T> { }
 impl<T: ?Sized, U: ?Sized> CoerceUnsized<Shared<U>> for Shared<T> where T: Unsize<U> { }

 #[unstable(feature = "shared", issue = "27730")]
-impl<T: ?Sized> Deref for Shared<T> {
-    type Target = *const T;
-
-    #[inline]
-    fn deref(&self) -> &*const T {
-        unsafe { mem::transmute(&*self.pointer) }
-    }
-}
-
-#[unstable(feature = "shared", issue = "27730")]
-impl<T> fmt::Pointer for Shared<T> {
+impl<T: ?Sized> fmt::Pointer for Shared<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.pointer, f)
+        fmt::Pointer::fmt(&self.as_ptr(), f)
     }
 }
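The new `as_ref`/`as_mut` docs stress that the returned borrow is tied to the wrapper; when a caller genuinely needs an unbound lifetime (as the `LinkedList` iterators above do), it must go through the raw pointer. A sketch under the `shared` feature as it exists in this diff:

```rust
#![feature(shared)]

use std::ptr::Shared;

// Borrow tied to the wrapper: the usual, more constrained choice.
unsafe fn bound<'a>(p: &'a Shared<u32>) -> &'a u32 {
    p.as_ref()
}

// Caller-chosen ("unbound") lifetime via the raw pointer: the caller takes
// full responsibility for not outliving the allocation.
unsafe fn unbound<'a>(p: Shared<u32>) -> &'a u32 {
    &*p.as_ptr()
}
```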
@@ -31,12 +31,12 @@ fn test_match_on_nonzero_option() {
         NonZero::new(42)
     });
     match a {
-        Some(val) => assert_eq!(*val, 42),
+        Some(val) => assert_eq!(val.get(), 42),
         None => panic!("unexpected None while matching on Some(NonZero(_))")
     }

     match unsafe { Some(NonZero::new(43)) } {
-        Some(val) => assert_eq!(*val, 43),
+        Some(val) => assert_eq!(val.get(), 43),
         None => panic!("unexpected None while matching on Some(NonZero(_))")
     }
 }
@@ -166,10 +166,10 @@ fn test_set_memory() {

 #[test]
 fn test_unsized_unique() {
-    let xs: &mut [i32] = &mut [1, 2, 3];
-    let ptr = unsafe { Unique::new(xs as *mut [i32]) };
-    let ys = unsafe { &mut **ptr };
-    let zs: &mut [i32] = &mut [1, 2, 3];
+    let xs: &[i32] = &[1, 2, 3];
+    let ptr = unsafe { Unique::new(xs as *const [i32] as *mut [i32]) };
+    let ys = unsafe { ptr.as_ref() };
+    let zs: &[i32] = &[1, 2, 3];
     assert!(ys == zs);
 }

@@ -62,14 +62,14 @@ pub struct Bytes {
 impl Deref for Bytes {
     type Target = [u8];
     fn deref(&self) -> &[u8] {
-        unsafe { slice::from_raw_parts(*self.ptr, self.len) }
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
     }
 }

 impl Drop for Bytes {
     fn drop(&mut self) {
         unsafe {
-            libc::free(*self.ptr as *mut _);
+            libc::free(self.ptr.as_ptr() as *mut _);
         }
     }
 }
@@ -72,7 +72,7 @@ impl<'tcx> From<ty::Region<'tcx>> for Kind<'tcx> {
 impl<'tcx> Kind<'tcx> {
     #[inline]
     unsafe fn downcast<T>(self, tag: usize) -> Option<&'tcx T> {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.get();
         if ptr & TAG_MASK == tag {
             Some(&*((ptr & !TAG_MASK) as *const _))
         } else {
@@ -102,7 +102,7 @@ impl<'tcx> fmt::Debug for Kind<'tcx> {
         } else if let Some(r) = self.as_region() {
             write!(f, "{:?}", r)
         } else {
-            write!(f, "<unknwon @ {:p}>", *self.ptr as *const ())
+            write!(f, "<unknwon @ {:p}>", self.ptr.get() as *const ())
         }
     }
 }
@@ -43,7 +43,7 @@ mod indexes {
             unsafe { $Index(NonZero::new(idx + 1)) }
         }
         fn index(self) -> usize {
-            *self.0 - 1
+            self.0.get() - 1
         }
     }

@@ -255,7 +255,7 @@ impl<'a, A: Array> Drop for Drain<'a, A> {

         if self.tail_len > 0 {
             unsafe {
-                let source_array_vec = &mut *self.array_vec.as_mut_ptr();
+                let source_array_vec = self.array_vec.as_mut();
                 // memmove back untouched tail, update to new length
                 let start = source_array_vec.len();
                 let tail = self.tail_start;
@@ -23,6 +23,6 @@ impl NodeIndex {
     }

     pub fn get(self) -> usize {
-        (*self.index - 1) as usize
+        (self.index.get() - 1) as usize
     }
 }
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use alloc::heap::{EMPTY, allocate, deallocate};
+use alloc::heap::{allocate, deallocate};

 use cmp;
 use hash::{BuildHasher, Hash, Hasher};
@@ -33,6 +33,7 @@ use self::BucketState::*;
 type HashUint = usize;

 const EMPTY_BUCKET: HashUint = 0;
+const EMPTY: usize = 1;

 /// Special `Unique<HashUint>` that uses the lower bit of the pointer
 /// to expose a boolean tag.
@@ -49,24 +50,25 @@ impl TaggedHashUintPtr {

     #[inline]
     fn set_tag(&mut self, value: bool) {
-        let usize_ptr = &*self.0 as *const *mut HashUint as *mut usize;
+        let mut usize_ptr = self.0.as_ptr() as usize;
         unsafe {
             if value {
-                *usize_ptr |= 1;
+                usize_ptr |= 1;
             } else {
-                *usize_ptr &= !1;
+                usize_ptr &= !1;
             }
+            self.0 = Unique::new(usize_ptr as *mut HashUint)
         }
     }

     #[inline]
     fn tag(&self) -> bool {
-        (*self.0 as usize) & 1 == 1
+        (self.0.as_ptr() as usize) & 1 == 1
     }

     #[inline]
     fn ptr(&self) -> *mut HashUint {
-        (*self.0 as usize & !1) as *mut HashUint
+        (self.0.as_ptr() as usize & !1) as *mut HashUint
     }
 }

@@ -1112,10 +1114,12 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {

     #[inline]
     fn next(&mut self) -> Option<(SafeHash, K, V)> {
-        self.iter.next().map(|raw| unsafe {
-            (*self.table.as_mut_ptr()).size -= 1;
+        self.iter.next().map(|raw| {
+            unsafe {
+                self.table.as_mut().size -= 1;
                 let (k, v) = ptr::read(raw.pair());
                 (SafeHash { hash: ptr::replace(&mut *raw.hash(), EMPTY_BUCKET) }, k, v)
+            }
         })
     }

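`TaggedHashUintPtr` above keeps a boolean in the low bit of a pointer that is aligned to at least 2, so that bit is otherwise always zero. The bit-twiddling, reduced to plain `usize` arithmetic:

```rust
fn set_tag(ptr: usize, value: bool) -> usize {
    if value { ptr | 1 } else { ptr & !1 }
}

fn tag(ptr: usize) -> bool {
    ptr & 1 == 1
}

fn untagged(ptr: usize) -> usize {
    ptr & !1
}

fn main() {
    let p = 0x1000usize; // stands in for a 2-aligned *mut HashUint
    let tagged = set_tag(p, true);
    assert!(tag(tagged));
    assert_eq!(untagged(tagged), p);
}
```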