libcore: Make it unsafe to create NonZero and impl Deref.

commit 466135bfef (parent 4af50548b9)
4 changed files with 55 additions and 59 deletions
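At a glance, the commit swaps the public `NonZero(value)` constructor and pattern-destructuring for an `unsafe` `NonZero::new` constructor plus `Deref` access. A rough before/after sketch of the calling pattern the hunks below apply (illustrative only, not part of the diff; `raw` stands in for any value known to be non-zero):

    // Before: the field was public, so safe code could wrap and unwrap freely.
    let nz = NonZero(raw);
    let NonZero(inner) = nz;        // destructure to get the value back

    // After: the caller must vouch for non-zero-ness in an unsafe block,
    // and reads go through Deref instead of destructuring.
    let nz = unsafe { NonZero::new(raw) };
    let inner = *nz;                // Deref yields the wrapped value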
|
@ -164,7 +164,7 @@ impl<T> Arc<T> {
|
|||
weak: atomic::AtomicUint::new(1),
|
||||
data: data,
|
||||
};
|
||||
Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) }
|
||||
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
|
||||
}
|
||||
|
||||
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
|
||||
|
@ -193,8 +193,7 @@ impl<T> Arc<T> {
|
|||
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
|
||||
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
|
||||
// to these contents.
|
||||
let NonZero(ptr) = self._ptr;
|
||||
unsafe { &*ptr }
|
||||
unsafe { &**self._ptr }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -281,8 +280,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
|
|||
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
|
||||
// this point, and we required the Arc itself to be `mut`, so we're returning the only
|
||||
// possible reference to the inner data.
|
||||
let NonZero(ptr) = self._ptr;
|
||||
let inner = unsafe { &mut *ptr };
|
||||
let inner = unsafe { &mut **self._ptr };
|
||||
&mut inner.data
|
||||
}
|
||||
}
|
||||
|
@ -317,7 +315,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
|
|||
fn drop(&mut self) {
|
||||
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
|
||||
// it is guaranteed to be zeroed after the first if it's run more than once)
|
||||
let NonZero(ptr) = self._ptr;
|
||||
let ptr = *self._ptr;
|
||||
if ptr.is_null() { return }
|
||||
|
||||
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
|
||||
|
@ -388,8 +386,7 @@ impl<T: Sync + Send> Weak<T> {
|
|||
#[inline]
|
||||
fn inner(&self) -> &ArcInner<T> {
|
||||
// See comments above for why this is "safe"
|
||||
let NonZero(ptr) = self._ptr;
|
||||
unsafe { &*ptr }
|
||||
unsafe { &**self._ptr }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -445,7 +442,7 @@ impl<T: Sync + Send> Drop for Weak<T> {
|
|||
/// } // implicit drop
|
||||
/// ```
|
||||
fn drop(&mut self) {
|
||||
let NonZero(ptr) = self._ptr;
|
||||
let ptr = *self._ptr;
|
||||
|
||||
// see comments above for why this check is here
|
||||
if ptr.is_null() { return }
|
||||
|
|
|
@@ -195,7 +195,7 @@ impl<T> Rc<T> {
                 // there is an implicit weak pointer owned by all the strong pointers, which
                 // ensures that the weak destructor never frees the allocation while the strong
                 // destructor is running, even if the weak pointer is stored inside the strong one.
-                _ptr: NonZero(transmute(box RcBox {
+                _ptr: NonZero::new(transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
@@ -280,8 +280,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
             let val = ptr::read(&*rc); // copy the contained object
             // destruct the box and skip our Drop
             // we can ignore the refcounts because we know we're unique
-            let NonZero(ptr) = rc._ptr;
-            deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
+            deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
                        min_align_of::<RcBox<T>>());
             forget(rc);
             Ok(val)
@@ -311,10 +310,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
 #[experimental]
 pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
     if is_unique(rc) {
-        let inner = unsafe {
-            let NonZero(ptr) = rc._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **rc._ptr };
         Some(&mut inner.value)
     } else {
         None
@@ -346,10 +342,7 @@ impl<T: Clone> Rc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
         // possible reference to the inner value.
-        let inner = unsafe {
-            let NonZero(ptr) = self._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.value
     }
 }
@@ -397,7 +390,7 @@ impl<T> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
@@ -689,7 +682,7 @@ impl<T> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all the strong pointers
@@ -750,18 +743,12 @@ trait RcBoxPtr<T> {
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 #[cfg(test)]

@@ -176,7 +176,7 @@ impl<T> Vec<T> {
         // non-null value which is fine since we never call deallocate on the ptr
         // if cap is 0. The reason for this is because the pointer of a slice
         // being NULL would break the null pointer optimization for enums.
-        Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: 0 }
+        Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
     }
 
     /// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -209,7 +209,7 @@ impl<T> Vec<T> {
     #[stable]
     pub fn with_capacity(capacity: uint) -> Vec<T> {
         if mem::size_of::<T>() == 0 {
-            Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: uint::MAX }
+            Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
         } else if capacity == 0 {
             Vec::new()
         } else {
@@ -217,7 +217,7 @@ impl<T> Vec<T> {
                                .expect("capacity overflow");
             let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
             if ptr.is_null() { ::alloc::oom() }
-            Vec { ptr: NonZero(ptr as *mut T), len: 0, cap: capacity }
+            Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
         }
     }
 
@@ -284,7 +284,7 @@ impl<T> Vec<T> {
     #[unstable = "needs finalization"]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
                                  capacity: uint) -> Vec<T> {
-        Vec { ptr: NonZero(ptr), len: length, cap: capacity }
+        Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
     }
 
     /// Creates a vector by copying the elements from a raw pointer.
@@ -792,11 +792,10 @@ impl<T> Vec<T> {
     pub fn shrink_to_fit(&mut self) {
         if mem::size_of::<T>() == 0 { return }
 
-        let NonZero(ptr) = self.ptr;
         if self.len == 0 {
             if self.cap != 0 {
                 unsafe {
-                    dealloc(ptr, self.cap)
+                    dealloc(*self.ptr, self.cap)
                 }
                 self.cap = 0;
             }
@@ -804,12 +803,12 @@ impl<T> Vec<T> {
             unsafe {
                 // Overflow check is unnecessary as the vector is already at
                 // least this large.
-                let ptr = reallocate(ptr as *mut u8,
+                let ptr = reallocate(*self.ptr as *mut u8,
                                      self.cap * mem::size_of::<T>(),
                                      self.len * mem::size_of::<T>(),
                                      mem::min_align_of::<T>()) as *mut T;
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = self.len;
         }
@@ -867,10 +866,9 @@ impl<T> Vec<T> {
     #[inline]
     #[stable]
     pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
-        let NonZero(ptr) = self.ptr;
         unsafe {
             mem::transmute(RawSlice {
-                data: ptr as *const T,
+                data: *self.ptr as *const T,
                 len: self.len,
             })
         }
@@ -893,7 +891,7 @@ impl<T> Vec<T> {
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn into_iter(self) -> IntoIter<T> {
         unsafe {
-            let NonZero(ptr) = self.ptr;
+            let ptr = *self.ptr;
             let cap = self.cap;
             let begin = ptr as *const T;
             let end = if mem::size_of::<T>() == 0 {
@@ -1113,16 +1111,15 @@ impl<T> Vec<T> {
             let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
             if old_size > size { panic!("capacity overflow") }
             unsafe {
-                let NonZero(ptr) = self.ptr;
-                let ptr = alloc_or_realloc(ptr, old_size, size);
+                let ptr = alloc_or_realloc(*self.ptr, old_size, size);
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = max(self.cap, 2) * 2;
         }
 
         unsafe {
-            let NonZero(end) = self.ptr.offset(self.len as int);
+            let end = *self.ptr.offset(self.len as int);
             ptr::write(&mut *end, value);
             self.len += 1;
         }
@@ -1167,11 +1164,11 @@ impl<T> Vec<T> {
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
         unsafe {
-            let begin = self.ptr.0 as *const T;
+            let begin = *self.ptr as *const T;
             let end = if mem::size_of::<T>() == 0 {
-                (self.ptr.0 as uint + self.len()) as *const T
+                (*self.ptr as uint + self.len()) as *const T
             } else {
-                self.ptr.0.offset(self.len() as int) as *const T
+                (*self.ptr).offset(self.len() as int) as *const T
             };
             self.set_len(0);
             Drain {
@@ -1236,10 +1233,9 @@ impl<T> Vec<T> {
         let size = capacity.checked_mul(mem::size_of::<T>())
                            .expect("capacity overflow");
         unsafe {
-            let NonZero(ptr) = self.ptr;
-            let ptr = alloc_or_realloc(ptr, self.cap * mem::size_of::<T>(), size);
+            let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
             if ptr.is_null() { ::alloc::oom() }
-            self.ptr = NonZero(ptr);
+            self.ptr = NonZero::new(ptr);
         }
         self.cap = capacity;
     }
@@ -1360,10 +1356,9 @@ impl<T> AsSlice<T> for Vec<T> {
     #[inline]
     #[stable]
     fn as_slice<'a>(&'a self) -> &'a [T] {
-        let NonZero(ptr) = self.ptr;
         unsafe {
             mem::transmute(RawSlice {
-                data: ptr as *const T,
+                data: *self.ptr as *const T,
                 len: self.len
             })
         }
@@ -1388,8 +1383,7 @@ impl<T> Drop for Vec<T> {
                 for x in self.iter() {
                     ptr::read(x);
                 }
-                let NonZero(ptr) = self.ptr;
-                dealloc(ptr, self.cap)
+                dealloc(*self.ptr, self.cap)
             }
         }
     }
@@ -1427,7 +1421,7 @@ impl<T> IntoIter<T> {
         for _x in self { }
         let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
        mem::forget(self);
-        Vec { ptr: NonZero(allocation), cap: cap, len: 0 }
+        Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
     }
 }

@@ -90,10 +90,10 @@
 use mem;
 use clone::Clone;
 use intrinsics;
-use kinds::Copy;
+use kinds::{Copy, Send, Sync};
+use ops::Deref;
 use option::Option;
 use option::Option::{Some, None};
-use kinds::{Send, Sync};
 
 use cmp::{PartialEq, Eq, Ord, PartialOrd, Equiv};
 use cmp::Ordering;
@@ -115,7 +115,25 @@ pub use intrinsics::set_memory;
 /// NULL or 0 that might allow certain optimizations.
 #[lang="non_zero"]
 #[deriving(Clone, PartialEq, Eq, PartialOrd)]
-pub struct NonZero<T>(pub T);
+#[experimental]
+pub struct NonZero<T>(T);
+
+impl<T> NonZero<T> {
+    /// Create an instance of NonZero with the provided value.
+    /// You must indeed ensure that the value is actually "non-zero".
+    #[inline(always)]
+    pub unsafe fn new(inner: T) -> NonZero<T> {
+        NonZero(inner)
+    }
+}
+
+impl<T> Deref<T> for NonZero<T> {
+    #[inline]
+    fn deref<'a>(&'a self) -> &'a T {
+        let NonZero(ref inner) = *self;
+        inner
+    }
+}
 
 impl<T: Copy> Copy for NonZero<T> {}
 
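With the `Deref` impl above in place, callers stop destructuring the tuple struct and simply dereference. A rough reading of why the `&**self._ptr` form in the Arc/Rc hunks type-checks (annotation only; the field and type names are taken from those hunks):

    // self._ptr: NonZero<*mut ArcInner<T>>
    //  *self._ptr   -- Deref on NonZero yields the raw *mut ArcInner<T>
    // **self._ptr   -- dereferences the raw pointer (hence the unsafe block)
    // &**self._ptr  -- reborrows the pointee as &ArcInner<T>
    unsafe { &**self._ptr }

The same chain explains `*self.ptr` in the Vec hunks: one `*` now goes through `Deref` where the old code wrote `let NonZero(ptr) = self.ptr;`.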