diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index b267377d90a..c81405d6119 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -1048,14 +1048,16 @@ impl<T: Clone> Rc<T> {
             }
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
+            let mut rc = Self::new_uninit();
             unsafe {
-                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
-                mem::swap(this, &mut swap);
-                swap.inner().dec_strong();
+                let data = Rc::get_mut_unchecked(&mut rc);
+                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+
+                this.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
-                swap.inner().dec_weak();
-                forget(swap);
+                this.inner().dec_weak();
+                ptr::write(this, rc.assume_init());
             }
         }
         // This unsafety is ok because we're guaranteed that the pointer
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index deeb6941fcf..5bfcbeb82a1 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -1392,17 +1392,14 @@ impl<T: Clone> Arc<T> {
 
             // Materialize our own implicit weak pointer, so that it can clean
             // up the ArcInner as needed.
-            let weak = Weak { ptr: this.ptr };
+            let _weak = Weak { ptr: this.ptr };
 
-            // mark the data itself as already deallocated
+            // Can just steal the data, all that's left is Weaks
+            let mut arc = Self::new_uninit();
             unsafe {
-                // there is no data race in the implicit write caused by `read`
-                // here (due to zeroing) because data is no longer accessed by
-                // other threads (due to there being no more strong refs at this
-                // point).
-                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
-                mem::swap(this, &mut swap);
-                mem::forget(swap);
+                let data = Arc::get_mut_unchecked(&mut arc);
+                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+                ptr::write(this, arc.assume_init());
            }
         } else {
             // We were the sole reference of either kind; bump back up the
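
For reference, here is a minimal, self-contained sketch (not part of the patch) of the user-visible behaviour the touched branch implements: when an `Rc` holds the only strong reference but weak references remain, `make_mut` moves the value into a fresh allocation and disassociates the existing `Weak`s instead of cloning. `Arc::make_mut` behaves analogously. The example uses only stable API; the variable names are illustrative.

```rust
use std::rc::Rc;

fn main() {
    // One strong reference plus one outstanding weak reference:
    // this is the `weak_count != 0` branch changed by the patch.
    let mut rc: Rc<String> = Rc::new(String::from("hello"));
    let weak = Rc::downgrade(&rc);

    // make_mut moves the value into a new allocation rather than cloning it,
    // leaving the old allocation for the remaining Weaks to clean up.
    Rc::make_mut(&mut rc).push_str(", world");

    assert_eq!(*rc, "hello, world");
    assert_eq!(Rc::strong_count(&rc), 1);
    assert_eq!(Rc::weak_count(&rc), 0);
    // The pre-existing Weak is disassociated from the new allocation.
    assert!(weak.upgrade().is_none());
}
```

The patch keeps that behaviour but performs the move with `new_uninit` + `get_mut_unchecked` + `copy_from_nonoverlapping`, rather than constructing a second fully initialized `Rc`/`Arc` via `ptr::read` and `mem::swap` only to `forget` it afterwards.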