Mark Arc::from_inner
/ Rc::from_inner
as unsafe
While these are internal functions, it is easy to use them to create invalid Arcs/Rcs pointing at dangling memory. Fixes https://github.com/rust-lang/rust/issues/89740
This commit is contained in:
parent
0c87288f92
commit
2e2c38e59b
2 changed files with 45 additions and 32 deletions
|
@ -333,12 +333,12 @@ impl<T: ?Sized> Rc<T> {
|
||||||
unsafe { self.ptr.as_ref() }
|
unsafe { self.ptr.as_ref() }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
|
unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
|
||||||
Self { ptr, phantom: PhantomData }
|
Self { ptr, phantom: PhantomData }
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
|
unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
|
||||||
Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
|
unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -359,9 +359,11 @@ impl<T> Rc<T> {
|
||||||
// pointers, which ensures that the weak destructor never frees
|
// pointers, which ensures that the weak destructor never frees
|
||||||
// the allocation while the strong destructor is running, even
|
// the allocation while the strong destructor is running, even
|
||||||
// if the weak pointer is stored inside the strong one.
|
// if the weak pointer is stored inside the strong one.
|
||||||
Self::from_inner(
|
unsafe {
|
||||||
Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
|
Self::from_inner(
|
||||||
)
|
Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
|
/// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
|
||||||
|
@ -412,16 +414,16 @@ impl<T> Rc<T> {
|
||||||
// otherwise.
|
// otherwise.
|
||||||
let data = data_fn(&weak);
|
let data = data_fn(&weak);
|
||||||
|
|
||||||
unsafe {
|
let strong = unsafe {
|
||||||
let inner = init_ptr.as_ptr();
|
let inner = init_ptr.as_ptr();
|
||||||
ptr::write(ptr::addr_of_mut!((*inner).value), data);
|
ptr::write(ptr::addr_of_mut!((*inner).value), data);
|
||||||
|
|
||||||
let prev_value = (*inner).strong.get();
|
let prev_value = (*inner).strong.get();
|
||||||
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
|
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
|
||||||
(*inner).strong.set(1);
|
(*inner).strong.set(1);
|
||||||
}
|
|
||||||
|
|
||||||
let strong = Rc::from_inner(init_ptr);
|
Rc::from_inner(init_ptr)
|
||||||
|
};
|
||||||
|
|
||||||
// Strong references should collectively own a shared weak reference,
|
// Strong references should collectively own a shared weak reference,
|
||||||
// so don't run the destructor for our old weak reference.
|
// so don't run the destructor for our old weak reference.
|
||||||
|
@ -511,10 +513,12 @@ impl<T> Rc<T> {
|
||||||
// pointers, which ensures that the weak destructor never frees
|
// pointers, which ensures that the weak destructor never frees
|
||||||
// the allocation while the strong destructor is running, even
|
// the allocation while the strong destructor is running, even
|
||||||
// if the weak pointer is stored inside the strong one.
|
// if the weak pointer is stored inside the strong one.
|
||||||
Ok(Self::from_inner(
|
unsafe {
|
||||||
Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
|
Ok(Self::from_inner(
|
||||||
.into(),
|
Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
|
||||||
))
|
.into(),
|
||||||
|
))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
|
/// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
|
||||||
|
@ -733,7 +737,7 @@ impl<T> Rc<mem::MaybeUninit<T>> {
|
||||||
#[unstable(feature = "new_uninit", issue = "63291")]
|
#[unstable(feature = "new_uninit", issue = "63291")]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub unsafe fn assume_init(self) -> Rc<T> {
|
pub unsafe fn assume_init(self) -> Rc<T> {
|
||||||
Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
|
unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1199,9 +1203,11 @@ impl Rc<dyn Any> {
|
||||||
/// ```
|
/// ```
|
||||||
pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
|
pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
|
||||||
if (*self).is::<T>() {
|
if (*self).is::<T>() {
|
||||||
let ptr = self.ptr.cast::<RcBox<T>>();
|
unsafe {
|
||||||
forget(self);
|
let ptr = self.ptr.cast::<RcBox<T>>();
|
||||||
Ok(Rc::from_inner(ptr))
|
forget(self);
|
||||||
|
Ok(Rc::from_inner(ptr))
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
Err(self)
|
Err(self)
|
||||||
}
|
}
|
||||||
|
@ -1474,8 +1480,10 @@ impl<T: ?Sized> Clone for Rc<T> {
|
||||||
/// ```
|
/// ```
|
||||||
#[inline]
|
#[inline]
|
||||||
fn clone(&self) -> Rc<T> {
|
fn clone(&self) -> Rc<T> {
|
||||||
self.inner().inc_strong();
|
unsafe {
|
||||||
Self::from_inner(self.ptr)
|
self.inner().inc_strong();
|
||||||
|
Self::from_inner(self.ptr)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2225,11 +2233,14 @@ impl<T: ?Sized> Weak<T> {
|
||||||
#[stable(feature = "rc_weak", since = "1.4.0")]
|
#[stable(feature = "rc_weak", since = "1.4.0")]
|
||||||
pub fn upgrade(&self) -> Option<Rc<T>> {
|
pub fn upgrade(&self) -> Option<Rc<T>> {
|
||||||
let inner = self.inner()?;
|
let inner = self.inner()?;
|
||||||
|
|
||||||
if inner.strong() == 0 {
|
if inner.strong() == 0 {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
inner.inc_strong();
|
unsafe {
|
||||||
Some(Rc::from_inner(self.ptr))
|
inner.inc_strong();
|
||||||
|
Some(Rc::from_inner(self.ptr))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -252,7 +252,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
|
||||||
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
|
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
|
||||||
|
|
||||||
impl<T: ?Sized> Arc<T> {
|
impl<T: ?Sized> Arc<T> {
|
||||||
fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
|
unsafe fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
|
||||||
Self { ptr, phantom: PhantomData }
|
Self { ptr, phantom: PhantomData }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -348,7 +348,7 @@ impl<T> Arc<T> {
|
||||||
weak: atomic::AtomicUsize::new(1),
|
weak: atomic::AtomicUsize::new(1),
|
||||||
data,
|
data,
|
||||||
};
|
};
|
||||||
Self::from_inner(Box::leak(x).into())
|
unsafe { Self::from_inner(Box::leak(x).into()) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Constructs a new `Arc<T>` using a weak reference to itself. Attempting
|
/// Constructs a new `Arc<T>` using a weak reference to itself. Attempting
|
||||||
|
@ -397,7 +397,7 @@ impl<T> Arc<T> {
|
||||||
|
|
||||||
// Now we can properly initialize the inner value and turn our weak
|
// Now we can properly initialize the inner value and turn our weak
|
||||||
// reference into a strong reference.
|
// reference into a strong reference.
|
||||||
unsafe {
|
let strong = unsafe {
|
||||||
let inner = init_ptr.as_ptr();
|
let inner = init_ptr.as_ptr();
|
||||||
ptr::write(ptr::addr_of_mut!((*inner).data), data);
|
ptr::write(ptr::addr_of_mut!((*inner).data), data);
|
||||||
|
|
||||||
|
@ -415,9 +415,9 @@ impl<T> Arc<T> {
|
||||||
// possible with safe code alone.
|
// possible with safe code alone.
|
||||||
let prev_value = (*inner).strong.fetch_add(1, Release);
|
let prev_value = (*inner).strong.fetch_add(1, Release);
|
||||||
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
|
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
|
||||||
}
|
|
||||||
|
|
||||||
let strong = Arc::from_inner(init_ptr);
|
Arc::from_inner(init_ptr)
|
||||||
|
};
|
||||||
|
|
||||||
// Strong references should collectively own a shared weak reference,
|
// Strong references should collectively own a shared weak reference,
|
||||||
// so don't run the destructor for our old weak reference.
|
// so don't run the destructor for our old weak reference.
|
||||||
|
@ -526,7 +526,7 @@ impl<T> Arc<T> {
|
||||||
weak: atomic::AtomicUsize::new(1),
|
weak: atomic::AtomicUsize::new(1),
|
||||||
data,
|
data,
|
||||||
})?;
|
})?;
|
||||||
Ok(Self::from_inner(Box::leak(x).into()))
|
unsafe { Ok(Self::from_inner(Box::leak(x).into())) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Constructs a new `Arc` with uninitialized contents, returning an error
|
/// Constructs a new `Arc` with uninitialized contents, returning an error
|
||||||
|
@ -737,7 +737,7 @@ impl<T> Arc<mem::MaybeUninit<T>> {
|
||||||
#[unstable(feature = "new_uninit", issue = "63291")]
|
#[unstable(feature = "new_uninit", issue = "63291")]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub unsafe fn assume_init(self) -> Arc<T> {
|
pub unsafe fn assume_init(self) -> Arc<T> {
|
||||||
Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
|
unsafe { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1327,7 +1327,7 @@ impl<T: ?Sized> Clone for Arc<T> {
|
||||||
abort();
|
abort();
|
||||||
}
|
}
|
||||||
|
|
||||||
Self::from_inner(self.ptr)
|
unsafe { Self::from_inner(self.ptr) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1654,9 +1654,11 @@ impl Arc<dyn Any + Send + Sync> {
|
||||||
T: Any + Send + Sync + 'static,
|
T: Any + Send + Sync + 'static,
|
||||||
{
|
{
|
||||||
if (*self).is::<T>() {
|
if (*self).is::<T>() {
|
||||||
let ptr = self.ptr.cast::<ArcInner<T>>();
|
unsafe {
|
||||||
mem::forget(self);
|
let ptr = self.ptr.cast::<ArcInner<T>>();
|
||||||
Ok(Arc::from_inner(ptr))
|
mem::forget(self);
|
||||||
|
Ok(Arc::from_inner(ptr))
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
Err(self)
|
Err(self)
|
||||||
}
|
}
|
||||||
|
@ -1880,7 +1882,7 @@ impl<T: ?Sized> Weak<T> {
|
||||||
// value can be initialized after `Weak` references have already been created. In that case, we
|
// value can be initialized after `Weak` references have already been created. In that case, we
|
||||||
// expect to observe the fully initialized value.
|
// expect to observe the fully initialized value.
|
||||||
match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
|
match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
|
||||||
Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
|
Ok(_) => return Some(unsafe { Arc::from_inner(self.ptr) }), // null checked above
|
||||||
Err(old) => n = old,
|
Err(old) => n = old,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue