
#[deny(unsafe_op_in_unsafe_fn)] in liballoc

LeSeulArtichaut 2020-05-28 23:27:00 +02:00
parent a39c7787ba
commit 39e29ce4d0
19 changed files with 391 additions and 263 deletions
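
For readers unfamiliar with the lint: `unsafe_op_in_unsafe_fn` removes the old rule that the body of an `unsafe fn` is one big implicit `unsafe` block. With the lint denied, every unsafe operation inside an `unsafe fn` needs its own explicit `unsafe { .. }` block, which is the mechanical change applied in every hunk below (the lint and the then-required `unsafe_block_in_unsafe_fn` feature gate are switched on in the lib.rs hunks). A minimal illustrative sketch, not part of this commit; on current compilers the lint works without the feature gate:

    #![deny(unsafe_op_in_unsafe_fn)]

    /// # Safety
    /// `ptr` must be non-null, properly aligned, and point to a valid `i32`.
    pub unsafe fn double_in_place(ptr: *mut i32) {
        // Under the old rules this raw-pointer write compiled as-is anywhere in an
        // `unsafe fn`; with the lint denied it must sit in an explicit unsafe block.
        unsafe {
            *ptr *= 2;
        }
    }

    fn main() {
        let mut x = 21;
        // Calling an `unsafe fn` still requires an unsafe block at the call site.
        unsafe { double_in_place(&mut x) };
        assert_eq!(x, 42);
    }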


@@ -77,7 +77,7 @@ pub struct Global;
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn alloc(layout: Layout) -> *mut u8 {
-    __rust_alloc(layout.size(), layout.align())
+    unsafe { __rust_alloc(layout.size(), layout.align()) }
 }
 
 /// Deallocate memory with the global allocator.
@@ -99,7 +99,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
-    __rust_dealloc(ptr, layout.size(), layout.align())
+    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
 }
 
 /// Reallocate memory with the global allocator.
@@ -121,7 +121,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
-    __rust_realloc(ptr, layout.size(), layout.align(), new_size)
+    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
 }
 
 /// Allocate zero-initialized memory with the global allocator.
@@ -158,7 +158,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
-    __rust_alloc_zeroed(layout.size(), layout.align())
+    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
@@ -183,7 +183,7 @@ unsafe impl AllocRef for Global {
     #[inline]
     unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         if layout.size() != 0 {
-            dealloc(ptr.as_ptr(), layout)
+            unsafe { dealloc(ptr.as_ptr(), layout) }
         }
     }
@@ -209,17 +209,20 @@ unsafe impl AllocRef for Global {
         match placement {
             ReallocPlacement::InPlace => Err(AllocErr),
             ReallocPlacement::MayMove if layout.size() == 0 => {
-                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                let new_layout =
+                    unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
                 self.alloc(new_layout, init)
             }
             ReallocPlacement::MayMove => {
                 // `realloc` probably checks for `new_size > size` or something similar.
-                intrinsics::assume(new_size > size);
-                let ptr = realloc(ptr.as_ptr(), layout, new_size);
-                let memory =
-                    MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
-                init.init_offset(memory, size);
-                Ok(memory)
+                unsafe {
+                    intrinsics::assume(new_size > size);
+                    let ptr = realloc(ptr.as_ptr(), layout, new_size);
+                    let memory =
+                        MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
+                    init.init_offset(memory, size);
+                    Ok(memory)
+                }
             }
         }
     }
@@ -245,14 +248,18 @@ unsafe impl AllocRef for Global {
         match placement {
             ReallocPlacement::InPlace => Err(AllocErr),
             ReallocPlacement::MayMove if new_size == 0 => {
-                self.dealloc(ptr, layout);
+                unsafe {
+                    self.dealloc(ptr, layout);
+                }
                 Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
             }
             ReallocPlacement::MayMove => {
                 // `realloc` probably checks for `new_size < size` or something similar.
-                intrinsics::assume(new_size < size);
-                let ptr = realloc(ptr.as_ptr(), layout, new_size);
-                Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
+                unsafe {
+                    intrinsics::assume(new_size < size);
+                    let ptr = realloc(ptr.as_ptr(), layout, new_size);
+                    Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
+                }
             }
         }
     }
@@ -264,7 +271,7 @@ unsafe impl AllocRef for Global {
 #[lang = "exchange_malloc"]
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
-    let layout = Layout::from_size_align_unchecked(size, align);
+    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
     match Global.alloc(layout, AllocInit::Uninitialized) {
         Ok(memory) => memory.ptr.as_ptr(),
         Err(_) => handle_alloc_error(layout),
@@ -279,10 +286,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 // For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
 // this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
-    let size = size_of_val(ptr.as_ref());
-    let align = min_align_of_val(ptr.as_ref());
-    let layout = Layout::from_size_align_unchecked(size, align);
-    Global.dealloc(ptr.cast().into(), layout)
+    unsafe {
+        let size = size_of_val(ptr.as_ref());
+        let align = min_align_of_val(ptr.as_ref());
+        let layout = Layout::from_size_align_unchecked(size, align);
+        Global.dealloc(ptr.cast().into(), layout)
+    }
 }
 
 /// Abort on memory allocation error or failure.


@@ -311,7 +311,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<T> {
-        Box::from_raw(Box::into_raw(self) as *mut T)
+        unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
     }
 }
@@ -349,7 +349,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<[T]> {
-        Box::from_raw(Box::into_raw(self) as *mut [T])
+        unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
     }
 }
@@ -393,7 +393,7 @@ impl<T: ?Sized> Box<T> {
     #[stable(feature = "box_raw", since = "1.4.0")]
     #[inline]
     pub unsafe fn from_raw(raw: *mut T) -> Self {
-        Box(Unique::new_unchecked(raw))
+        Box(unsafe { Unique::new_unchecked(raw) })
     }
 
     /// Consumes the `Box`, returning a wrapped raw pointer.


@@ -1003,7 +1003,7 @@ impl<'a, T> Hole<'a, T> {
     unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
         debug_assert!(pos < data.len());
         // SAFE: pos should be inside the slice
-        let elt = ptr::read(data.get_unchecked(pos));
+        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
         Hole { data, elt: ManuallyDrop::new(elt), pos }
     }
@@ -1025,7 +1025,7 @@ impl<'a, T> Hole<'a, T> {
     unsafe fn get(&self, index: usize) -> &T {
         debug_assert!(index != self.pos);
         debug_assert!(index < self.data.len());
-        self.data.get_unchecked(index)
+        unsafe { self.data.get_unchecked(index) }
     }
 
     /// Move hole to new location
@@ -1035,9 +1035,11 @@ impl<'a, T> Hole<'a, T> {
     unsafe fn move_to(&mut self, index: usize) {
         debug_assert!(index != self.pos);
         debug_assert!(index < self.data.len());
-        let index_ptr: *const _ = self.data.get_unchecked(index);
-        let hole_ptr = self.data.get_unchecked_mut(self.pos);
-        ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+        unsafe {
+            let index_ptr: *const _ = self.data.get_unchecked(index);
+            let hole_ptr = self.data.get_unchecked_mut(self.pos);
+            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+        }
         self.pos = index;
     }
 }


@@ -1725,7 +1725,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>> {
         let edge = self.cur_leaf_edge.as_ref()?;
-        ptr::read(edge).next_kv().ok()
+        unsafe { ptr::read(edge).next_kv().ok() }
     }
 
     /// Implementation of a typical `DrainFilter::next` method, given the predicate.
@@ -1808,7 +1808,7 @@ impl<'a, K, V> Range<'a, K, V> {
     }
 
     unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
-        unwrap_unchecked(self.front.as_mut()).next_unchecked()
+        unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
     }
 }
@@ -1821,7 +1821,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
 impl<'a, K, V> Range<'a, K, V> {
     unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
-        unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
+        unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
     }
 }
@@ -1859,7 +1859,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
     }
 
     unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        unwrap_unchecked(self.front.as_mut()).next_unchecked()
+        unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
     }
 }
@@ -1880,7 +1880,7 @@ impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
 impl<'a, K, V> RangeMut<'a, K, V> {
     unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
+        unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
     }
 }


@@ -19,7 +19,9 @@ pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
         if cfg!(debug_assertions) {
             panic!("'unchecked' unwrap on None in BTreeMap");
         } else {
-            core::intrinsics::unreachable();
+            unsafe {
+                core::intrinsics::unreachable();
+            }
         }
     })
 }


@@ -64,8 +64,10 @@ macro_rules! def_next_kv_uncheched_dealloc {
             edge = match edge.$adjacent_kv() {
                 Ok(internal_kv) => return internal_kv,
                 Err(last_edge) => {
-                    let parent_edge = last_edge.into_node().deallocate_and_ascend();
-                    unwrap_unchecked(parent_edge).forget_node_type()
+                    unsafe {
+                        let parent_edge = last_edge.into_node().deallocate_and_ascend();
+                        unwrap_unchecked(parent_edge).forget_node_type()
+                    }
                 }
             }
         }
@@ -82,9 +84,11 @@ def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_k
 /// Safety: The change closure must not panic.
 #[inline]
 unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
-    let value = ptr::read(v);
+    let value = unsafe { ptr::read(v) };
     let (new_value, ret) = change(value);
-    ptr::write(v, new_value);
+    unsafe {
+        ptr::write(v, new_value);
+    }
     ret
 }
@@ -93,22 +97,26 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Ed
     /// key and value in between.
     /// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
     pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
-        replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (kv.next_leaf_edge(), kv.into_kv())
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (kv.next_leaf_edge(), kv.into_kv())
+            })
+        }
     }
 
     /// Moves the leaf edge handle to the previous leaf edge and returns references to the
     /// key and value in between.
     /// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
     pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
-        replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_back_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (kv.next_back_leaf_edge(), kv.into_kv())
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_back_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (kv.next_back_leaf_edge(), kv.into_kv())
+            })
+        }
     }
 }
@@ -119,14 +127,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
     /// - The caller must ensure that the leaf edge is not the last one in the tree.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        let kv = replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (ptr::read(&kv).next_leaf_edge(), kv)
-        });
-        // Doing the descend (and perhaps another move) invalidates the references
-        // returned by `into_kv_mut`, so we have to do this last.
-        kv.into_kv_mut()
+        unsafe {
+            let kv = replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (ptr::read(&kv).next_leaf_edge(), kv)
+            });
+            // Doing the descend (and perhaps another move) invalidates the references
+            // returned by `into_kv_mut`, so we have to do this last.
+            kv.into_kv_mut()
+        }
     }
 
     /// Moves the leaf edge handle to the previous leaf and returns references to the
@@ -135,14 +145,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
     /// - The caller must ensure that the leaf edge is not the first one in the tree.
     /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        let kv = replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_back_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (ptr::read(&kv).next_back_leaf_edge(), kv)
-        });
-        // Doing the descend (and perhaps another move) invalidates the references
-        // returned by `into_kv_mut`, so we have to do this last.
-        kv.into_kv_mut()
+        unsafe {
+            let kv = replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_back_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (ptr::read(&kv).next_back_leaf_edge(), kv)
+            });
+            // Doing the descend (and perhaps another move) invalidates the references
+            // returned by `into_kv_mut`, so we have to do this last.
+            kv.into_kv_mut()
+        }
     }
 }
@@ -159,12 +171,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
     /// if the two preconditions above hold.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_unchecked(&mut self) -> (K, V) {
-        replace(self, |leaf_edge| {
-            let kv = next_kv_unchecked_dealloc(leaf_edge);
-            let k = ptr::read(kv.reborrow().into_kv().0);
-            let v = ptr::read(kv.reborrow().into_kv().1);
-            (kv.next_leaf_edge(), (k, v))
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = next_kv_unchecked_dealloc(leaf_edge);
+                let k = ptr::read(kv.reborrow().into_kv().0);
+                let v = ptr::read(kv.reborrow().into_kv().1);
+                (kv.next_leaf_edge(), (k, v))
+            })
+        }
     }
 
     /// Moves the leaf edge handle to the previous leaf edge and returns the key
@@ -179,12 +193,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
     /// if the two preconditions above hold.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
-        replace(self, |leaf_edge| {
-            let kv = next_back_kv_unchecked_dealloc(leaf_edge);
-            let k = ptr::read(kv.reborrow().into_kv().0);
-            let v = ptr::read(kv.reborrow().into_kv().1);
-            (kv.next_back_leaf_edge(), (k, v))
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = next_back_kv_unchecked_dealloc(leaf_edge);
+                let k = ptr::read(kv.reborrow().into_kv().0);
+                let v = ptr::read(kv.reborrow().into_kv().1);
+                (kv.next_back_leaf_edge(), (k, v))
+            })
+        }
     }
 }


@@ -107,7 +107,7 @@ impl<K, V> InternalNode<K, V> {
     /// `len` of 0), there must be one initialized and valid edge. This function does not set up
     /// such an edge.
     unsafe fn new() -> Self {
-        InternalNode { data: LeafNode::new(), edges: [MaybeUninit::UNINIT; 2 * B] }
+        InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] }
     }
 }
@@ -131,7 +131,7 @@ impl<K, V> BoxedNode<K, V> {
     }
 
     unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
-        BoxedNode { ptr: Unique::new_unchecked(ptr.as_ptr()) }
+        BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
     }
 
     fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
@@ -392,14 +392,16 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
         let height = self.height;
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(
-            node.cast(),
-            if height > 0 {
-                Layout::new::<InternalNode<K, V>>()
-            } else {
-                Layout::new::<LeafNode<K, V>>()
-            },
-        );
+        unsafe {
+            Global.dealloc(
+                node.cast(),
+                if height > 0 {
+                    Layout::new::<InternalNode<K, V>>()
+                } else {
+                    Layout::new::<LeafNode<K, V>>()
+                },
+            );
+        }
         ret
     }
 }
@@ -565,7 +567,7 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
         debug_assert!(first <= self.len());
         debug_assert!(after_last <= self.len() + 1);
         for i in first..after_last {
-            Handle::new_edge(self.reborrow_mut(), i).correct_parent_link();
+            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
         }
     }
@@ -789,7 +791,7 @@ impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeT
         &mut self,
     ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
         // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
-        Handle { node: self.node.reborrow_mut(), idx: self.idx, _marker: PhantomData }
+        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
     }
 }
@@ -885,7 +887,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     unsafe fn cast_unchecked<NewType>(
         &mut self,
     ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
-        Handle::new_edge(self.node.cast_unchecked(), self.idx)
+        unsafe { Handle::new_edge(self.node.cast_unchecked(), self.idx) }
     }
 
     /// Inserts a new key/value pair and an edge that will go to the right of that new pair
@@ -1330,8 +1332,10 @@ unsafe fn move_kv<K, V>(
     dest_offset: usize,
     count: usize,
 ) {
-    ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
-    ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
+    unsafe {
+        ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
+        ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
+    }
 }
 
 // Source and destination must have the same height.
@@ -1344,8 +1348,10 @@ unsafe fn move_edges<K, V>(
 ) {
     let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
     let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
-    ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
-    dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+    unsafe {
+        ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
+        dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+    }
 }
 
 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
@@ -1459,12 +1465,16 @@ pub mod marker {
 }
 
 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
-    ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
-    ptr::write(slice.get_unchecked_mut(idx), val);
+    unsafe {
+        ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
+        ptr::write(slice.get_unchecked_mut(idx), val);
+    }
 }
 
 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
-    let ret = ptr::read(slice.get_unchecked(idx));
-    ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
-    ret
+    unsafe {
+        let ret = ptr::read(slice.get_unchecked(idx));
+        ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
+        ret
+    }
 }


@@ -225,17 +225,17 @@ impl<T> LinkedList<T> {
     /// maintain validity of aliasing pointers.
     #[inline]
     unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
-        let node = node.as_mut(); // this one is ours now, we can create an &mut.
+        let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut.
 
         // Not creating new mutable (unique!) references overlapping `element`.
         match node.prev {
-            Some(prev) => (*prev.as_ptr()).next = node.next,
+            Some(prev) => unsafe { (*prev.as_ptr()).next = node.next },
             // this node is the head node
             None => self.head = node.next,
         };
 
         match node.next {
-            Some(next) => (*next.as_ptr()).prev = node.prev,
+            Some(next) => unsafe { (*next.as_ptr()).prev = node.prev },
            // this node is the tail node
            None => self.tail = node.prev,
        };
@@ -258,17 +258,23 @@ impl<T> LinkedList<T> {
         // This method takes care not to create multiple mutable references to whole nodes at the same time,
         // to maintain validity of aliasing pointers into `element`.
         if let Some(mut existing_prev) = existing_prev {
-            existing_prev.as_mut().next = Some(splice_start);
+            unsafe {
+                existing_prev.as_mut().next = Some(splice_start);
+            }
         } else {
             self.head = Some(splice_start);
         }
         if let Some(mut existing_next) = existing_next {
-            existing_next.as_mut().prev = Some(splice_end);
+            unsafe {
+                existing_next.as_mut().prev = Some(splice_end);
+            }
         } else {
             self.tail = Some(splice_end);
         }
-        splice_start.as_mut().prev = existing_prev;
-        splice_end.as_mut().next = existing_next;
+        unsafe {
+            splice_start.as_mut().prev = existing_prev;
+            splice_end.as_mut().next = existing_next;
+        }
 
         self.len += splice_length;
     }
@@ -297,9 +303,13 @@ impl<T> LinkedList<T> {
         if let Some(mut split_node) = split_node {
             let first_part_head;
             let first_part_tail;
-            first_part_tail = split_node.as_mut().prev.take();
+            unsafe {
+                first_part_tail = split_node.as_mut().prev.take();
+            }
             if let Some(mut tail) = first_part_tail {
-                tail.as_mut().next = None;
+                unsafe {
+                    tail.as_mut().next = None;
+                }
                 first_part_head = self.head;
             } else {
                 first_part_head = None;
@@ -333,9 +343,13 @@ impl<T> LinkedList<T> {
         if let Some(mut split_node) = split_node {
             let second_part_head;
             let second_part_tail;
-            second_part_head = split_node.as_mut().next.take();
+            unsafe {
+                second_part_head = split_node.as_mut().next.take();
+            }
             if let Some(mut head) = second_part_head {
-                head.as_mut().prev = None;
+                unsafe {
+                    head.as_mut().prev = None;
+                }
                 second_part_tail = self.tail;
             } else {
                 second_part_tail = None;


@@ -7,6 +7,8 @@
 #![stable(feature = "rust1", since = "1.0.0")]
 
+// ignore-tidy-filelength
+
 use core::array::LengthAtMost32;
 use core::cmp::{self, Ordering};
 use core::fmt;
@@ -201,25 +203,27 @@ impl<T> VecDeque<T> {
     /// Turn ptr into a slice
     #[inline]
     unsafe fn buffer_as_slice(&self) -> &[T] {
-        slice::from_raw_parts(self.ptr(), self.cap())
+        unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
     }
 
     /// Turn ptr into a mut slice
     #[inline]
     unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
-        slice::from_raw_parts_mut(self.ptr(), self.cap())
+        unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
     }
 
     /// Moves an element out of the buffer
     #[inline]
     unsafe fn buffer_read(&mut self, off: usize) -> T {
-        ptr::read(self.ptr().add(off))
+        unsafe { ptr::read(self.ptr().add(off)) }
     }
 
     /// Writes an element into the buffer, moving it.
     #[inline]
     unsafe fn buffer_write(&mut self, off: usize, value: T) {
-        ptr::write(self.ptr().add(off), value);
+        unsafe {
+            ptr::write(self.ptr().add(off), value);
+        }
     }
 
     /// Returns `true` if the buffer is at full capacity.
@@ -268,7 +272,9 @@ impl<T> VecDeque<T> {
             len,
             self.cap()
         );
-        ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
+        unsafe {
+            ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
+        }
     }
 
     /// Copies a contiguous block of memory len long from src to dst
@@ -290,7 +296,9 @@ impl<T> VecDeque<T> {
             len,
             self.cap()
         );
-        ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
+        unsafe {
+            ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
+        }
     }
 
     /// Copies a potentially wrapping block of memory len long from src to dest.
@@ -330,7 +338,9 @@ impl<T> VecDeque<T> {
                 // 2 [_ _ A A A A B B _]
                 //    D . . .
                 //
-                self.copy(dst, src, len);
+                unsafe {
+                    self.copy(dst, src, len);
+                }
             }
             (false, false, true) => {
                 // dst before src, src doesn't wrap, dst wraps
@@ -341,8 +351,10 @@ impl<T> VecDeque<T> {
                 // 3 [B B B B _ _ _ A A]
                 //    . . D .
                 //
-                self.copy(dst, src, dst_pre_wrap_len);
-                self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+                unsafe {
+                    self.copy(dst, src, dst_pre_wrap_len);
+                    self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+                }
             }
             (true, false, true) => {
                 // src before dst, src doesn't wrap, dst wraps
@@ -353,8 +365,10 @@ impl<T> VecDeque<T> {
                 // 3 [B B _ _ _ A A A A]
                 //    . . D .
                 //
-                self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
-                self.copy(dst, src, dst_pre_wrap_len);
+                unsafe {
+                    self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+                    self.copy(dst, src, dst_pre_wrap_len);
+                }
             }
             (false, true, false) => {
                 // dst before src, src wraps, dst doesn't wrap
@@ -365,8 +379,10 @@ impl<T> VecDeque<T> {
                 // 3 [C C _ _ _ B B C C]
                 //    D . . .
                 //
-                self.copy(dst, src, src_pre_wrap_len);
-                self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+                unsafe {
+                    self.copy(dst, src, src_pre_wrap_len);
+                    self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+                }
             }
             (true, true, false) => {
                 // src before dst, src wraps, dst doesn't wrap
@@ -377,8 +393,10 @@ impl<T> VecDeque<T> {
                 // 3 [C C A A _ _ _ C C]
                 //    D . . .
                 //
-                self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
-                self.copy(dst, src, src_pre_wrap_len);
+                unsafe {
+                    self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+                    self.copy(dst, src, src_pre_wrap_len);
+                }
             }
             (false, true, true) => {
                 // dst before src, src wraps, dst wraps
@@ -392,9 +410,11 @@ impl<T> VecDeque<T> {
                 //
                 debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
                 let delta = dst_pre_wrap_len - src_pre_wrap_len;
-                self.copy(dst, src, src_pre_wrap_len);
-                self.copy(dst + src_pre_wrap_len, 0, delta);
-                self.copy(0, delta, len - dst_pre_wrap_len);
+                unsafe {
+                    self.copy(dst, src, src_pre_wrap_len);
+                    self.copy(dst + src_pre_wrap_len, 0, delta);
+                    self.copy(0, delta, len - dst_pre_wrap_len);
+                }
             }
             (true, true, true) => {
                 // src before dst, src wraps, dst wraps
@@ -408,9 +428,11 @@ impl<T> VecDeque<T> {
                 //
                 debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
                 let delta = src_pre_wrap_len - dst_pre_wrap_len;
-                self.copy(delta, 0, len - src_pre_wrap_len);
-                self.copy(0, self.cap() - delta, delta);
-                self.copy(dst, src, dst_pre_wrap_len);
+                unsafe {
+                    self.copy(delta, 0, len - src_pre_wrap_len);
+                    self.copy(0, self.cap() - delta, delta);
+                    self.copy(dst, src, dst_pre_wrap_len);
+                }
             }
         }
     }
@@ -440,13 +462,17 @@ impl<T> VecDeque<T> {
             // Nop
         } else if self.head < old_capacity - self.tail {
             // B
-            self.copy_nonoverlapping(old_capacity, 0, self.head);
+            unsafe {
+                self.copy_nonoverlapping(old_capacity, 0, self.head);
+            }
             self.head += old_capacity;
             debug_assert!(self.head > self.tail);
         } else {
             // C
             let new_tail = new_capacity - (old_capacity - self.tail);
-            self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
+            unsafe {
+                self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
+            }
             self.tail = new_tail;
             debug_assert!(self.head < self.tail);
         }
@@ -2297,7 +2323,9 @@ impl<T> VecDeque<T> {
     unsafe fn rotate_left_inner(&mut self, mid: usize) {
         debug_assert!(mid * 2 <= self.len());
-        self.wrap_copy(self.head, self.tail, mid);
+        unsafe {
+            self.wrap_copy(self.head, self.tail, mid);
+        }
         self.head = self.wrap_add(self.head, mid);
         self.tail = self.wrap_add(self.tail, mid);
     }
@@ -2306,7 +2334,9 @@ impl<T> VecDeque<T> {
         debug_assert!(k * 2 <= self.len());
         self.head = self.wrap_sub(self.head, k);
         self.tail = self.wrap_sub(self.tail, k);
-        self.wrap_copy(self.tail, self.head, k);
+        unsafe {
+            self.wrap_copy(self.tail, self.head, k);
+        }
     }
 }


@@ -72,6 +72,7 @@
 #![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings
 #![allow(explicit_outlives_requirements)]
 #![allow(incomplete_features)]
+#![deny(unsafe_op_in_unsafe_fn)]
 #![cfg_attr(not(test), feature(generator_trait))]
 #![cfg_attr(test, feature(test))]
 #![feature(allocator_api)]
@@ -118,6 +119,7 @@
 #![feature(try_reserve)]
 #![feature(unboxed_closures)]
 #![feature(unicode_internals)]
+#![feature(unsafe_block_in_unsafe_fn)]
 #![feature(unsize)]
 #![feature(unsized_locals)]
 #![feature(allocator_internals)]


@@ -108,7 +108,7 @@ impl<T> RawVec<T, Global> {
     /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
     #[inline]
     pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
-        Self::from_raw_parts_in(ptr, capacity, Global)
+        unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
     }
 
     /// Converts a `Box<[T]>` into a `RawVec<T>`.
@@ -139,8 +139,10 @@ impl<T> RawVec<T, Global> {
         );
 
         let me = ManuallyDrop::new(self);
-        let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
-        Box::from_raw(slice)
+        unsafe {
+            let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
+            Box::from_raw(slice)
+        }
     }
 }
@@ -192,7 +194,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
     #[inline]
     pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
-        Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a }
+        Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc: a }
     }
 
     /// Gets a raw pointer to the start of the allocation. Note that this is


@@ -35,7 +35,7 @@ fn allocator_param() {
            }
        }
        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
-           Global.dealloc(ptr, layout)
+           unsafe { Global.dealloc(ptr, layout) }
        }
    }


@@ -304,7 +304,7 @@ impl<T: ?Sized> Rc<T> {
     }
 
     unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
-        Self::from_inner(NonNull::new_unchecked(ptr))
+        Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
     }
 }
@@ -544,7 +544,7 @@ impl<T> Rc<[mem::MaybeUninit<T>]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Rc<[T]> {
-        Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _)
+        unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
     }
 }
@@ -643,13 +643,13 @@ impl<T: ?Sized> Rc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub unsafe fn from_raw(ptr: *const T) -> Self {
-        let offset = data_offset(ptr);
+        let offset = unsafe { data_offset(ptr) };
 
         // Reverse the offset to find the original RcBox.
         let fake_ptr = ptr as *mut RcBox<T>;
-        let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+        let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) };
 
-        Self::from_ptr(rc_ptr)
+        unsafe { Self::from_ptr(rc_ptr) }
     }
 
     /// Consumes the `Rc`, returning the wrapped pointer as `NonNull<T>`.
@@ -805,7 +805,7 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[unstable(feature = "get_mut_unchecked", issue = "63292")]
     pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
-        &mut this.ptr.as_mut().value
+        unsafe { &mut this.ptr.as_mut().value }
     }
 
     #[inline]
@@ -964,10 +964,12 @@ impl<T: ?Sized> Rc<T> {
         // Initialize the RcBox
         let inner = mem_to_rcbox(mem.ptr.as_ptr());
-        debug_assert_eq!(Layout::for_value(&*inner), layout);
-
-        ptr::write(&mut (*inner).strong, Cell::new(1));
-        ptr::write(&mut (*inner).weak, Cell::new(1));
+        unsafe {
+            debug_assert_eq!(Layout::for_value(&*inner), layout);
+
+            ptr::write(&mut (*inner).strong, Cell::new(1));
+            ptr::write(&mut (*inner).weak, Cell::new(1));
+        }
 
         inner
     }
@@ -975,9 +977,11 @@ impl<T: ?Sized> Rc<T> {
     /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
         // Allocate for the `RcBox<T>` using the given value.
-        Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
-            set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
-        })
+        unsafe {
+            Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+                set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
+            })
+        }
     }
 
     fn from_box(v: Box<T>) -> Rc<T> {
@@ -1006,9 +1010,11 @@ impl<T: ?Sized> Rc<T> {
 impl<T> Rc<[T]> {
     /// Allocates an `RcBox<[T]>` with the given length.
     unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
-        Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
-            ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
-        })
+        unsafe {
+            Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+                ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
+            })
+        }
     }
 }
@@ -1017,7 +1023,9 @@ impl<T> Rc<[T]> {
 /// For a slice/trait object, this sets the `data` field and leaves the rest
 /// unchanged. For a sized raw pointer, this simply sets the pointer.
 unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
-    ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+    unsafe {
+        ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+    }
     ptr
 }
@@ -1026,11 +1034,11 @@ impl<T> Rc<[T]> {
     ///
     /// Unsafe because the caller must either take ownership or bind `T: Copy`
     unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
-        let ptr = Self::allocate_for_slice(v.len());
-        ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
-        Self::from_ptr(ptr)
+        unsafe {
+            let ptr = Self::allocate_for_slice(v.len());
+            ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
+            Self::from_ptr(ptr)
+        }
     }
 
     /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
@@ -1058,25 +1066,27 @@ impl<T> Rc<[T]> {
             }
         }
 
-        let ptr = Self::allocate_for_slice(len);
-
-        let mem = ptr as *mut _ as *mut u8;
-        let layout = Layout::for_value(&*ptr);
-
-        // Pointer to first element
-        let elems = &mut (*ptr).value as *mut [T] as *mut T;
-
-        let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
-
-        for (i, item) in iter.enumerate() {
-            ptr::write(elems.add(i), item);
-            guard.n_elems += 1;
-        }
-
-        // All clear. Forget the guard so it doesn't free the new RcBox.
-        forget(guard);
-
-        Self::from_ptr(ptr)
+        unsafe {
+            let ptr = Self::allocate_for_slice(len);
+
+            let mem = ptr as *mut _ as *mut u8;
+            let layout = Layout::for_value(&*ptr);
+
+            // Pointer to first element
+            let elems = &mut (*ptr).value as *mut [T] as *mut T;
+
+            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
+
+            for (i, item) in iter.enumerate() {
+                ptr::write(elems.add(i), item);
+                guard.n_elems += 1;
+            }
+
+            // All clear. Forget the guard so it doesn't free the new RcBox.
+            forget(guard);
+
+            Self::from_ptr(ptr)
+        }
     }
 }
@@ -1786,10 +1796,12 @@ impl<T> Weak<T> {
             Self::new()
         } else {
             // See Rc::from_raw for details
-            let offset = data_offset(ptr);
-            let fake_ptr = ptr as *mut RcBox<T>;
-            let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
-            Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
+            unsafe {
+                let offset = data_offset(ptr);
+                let fake_ptr = ptr as *mut RcBox<T>;
+                let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+                Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
+            }
         }
     }
 }
@@ -2106,7 +2118,7 @@ unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
     // Because it is ?Sized, it will always be the last field in memory.
     // Note: This is a detail of the current implementation of the compiler,
     // and is not a guaranteed language detail. Do not rely on it outside of std.
-    data_offset_align(align_of_val(&*ptr))
+    unsafe { data_offset_align(align_of_val(&*ptr)) }
 }
 
 /// Computes the offset of the data field within `RcBox`.


@@ -831,8 +831,7 @@ where
 {
     let len = v.len();
     let v = v.as_mut_ptr();
-    let v_mid = v.add(mid);
-    let v_end = v.add(len);
+    let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
 
     // The merge process first copies the shorter run into `buf`. Then it traces the newly copied
     // run and the longer run forwards (or backwards), comparing their next unconsumed elements and
@@ -855,8 +854,10 @@ where
     if mid <= len - mid {
         // The left run is shorter.
-        ptr::copy_nonoverlapping(v, buf, mid);
-        hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
+        unsafe {
+            ptr::copy_nonoverlapping(v, buf, mid);
+            hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
+        }
 
         // Initially, these pointers point to the beginnings of their arrays.
         let left = &mut hole.start;
@@ -866,17 +867,21 @@ where
         while *left < hole.end && right < v_end {
             // Consume the lesser side.
            // If equal, prefer the left run to maintain stability.
-            let to_copy = if is_less(&*right, &**left) {
-                get_and_increment(&mut right)
-            } else {
-                get_and_increment(left)
-            };
-            ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
+            unsafe {
+                let to_copy = if is_less(&*right, &**left) {
+                    get_and_increment(&mut right)
+                } else {
+                    get_and_increment(left)
+                };
+                ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
+            }
         }
     } else {
         // The right run is shorter.
-        ptr::copy_nonoverlapping(v_mid, buf, len - mid);
-        hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
+        unsafe {
+            ptr::copy_nonoverlapping(v_mid, buf, len - mid);
+            hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
+        }
 
         // Initially, these pointers point past the ends of their arrays.
         let left = &mut hole.dest;
@@ -886,12 +891,14 @@ where
         while v < *left && buf < *right {
             // Consume the greater side.
             // If equal, prefer the right run to maintain stability.
-            let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
-                decrement_and_get(left)
-            } else {
-                decrement_and_get(right)
-            };
-            ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
+            unsafe {
+                let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
+                    decrement_and_get(left)
+                } else {
+                    decrement_and_get(right)
+                };
+                ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
+            }
         }
     }
     // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
@@ -899,12 +906,12 @@ where
 
     unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
         let old = *ptr;
-        *ptr = ptr.offset(1);
+        *ptr = unsafe { ptr.offset(1) };
         old
     }
 
     unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
-        *ptr = ptr.offset(-1);
+        *ptr = unsafe { ptr.offset(-1) };
        *ptr
    }


@@ -583,5 +583,5 @@ impl str {
     #[stable(feature = "str_box_extras", since = "1.20.0")]
     #[inline]
     pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
-        Box::from_raw(Box::into_raw(v) as *mut str)
+        unsafe { Box::from_raw(Box::into_raw(v) as *mut str) }
     }
 }


@@ -724,7 +724,7 @@ impl String {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
-        String { vec: Vec::from_raw_parts(buf, length, capacity) }
+        unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
     }
 
     /// Converts a vector of bytes to a `String` without checking that the
@@ -1329,9 +1329,11 @@ impl String {
         let amt = bytes.len();
         self.vec.reserve(amt);
 
-        ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
-        ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
-        self.vec.set_len(len + amt);
+        unsafe {
+            ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
+            ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
+            self.vec.set_len(len + amt);
+        }
     }
 
     /// Inserts a string slice into this `String` at a byte position.


@ -232,7 +232,7 @@ impl<T: ?Sized> Arc<T> {
} }
unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self { unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
Self::from_inner(NonNull::new_unchecked(ptr)) unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
} }
} }
@ -543,7 +543,7 @@ impl<T> Arc<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")] #[unstable(feature = "new_uninit", issue = "63291")]
#[inline] #[inline]
pub unsafe fn assume_init(self) -> Arc<[T]> { pub unsafe fn assume_init(self) -> Arc<[T]> {
Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
} }
} }
@ -642,13 +642,15 @@ impl<T: ?Sized> Arc<T> {
/// ``` /// ```
#[stable(feature = "rc_raw", since = "1.17.0")] #[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self { pub unsafe fn from_raw(ptr: *const T) -> Self {
let offset = data_offset(ptr); unsafe {
let offset = data_offset(ptr);
// Reverse the offset to find the original ArcInner. // Reverse the offset to find the original ArcInner.
let fake_ptr = ptr as *mut ArcInner<T>; let fake_ptr = ptr as *mut ArcInner<T>;
let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Self::from_ptr(arc_ptr) Self::from_ptr(arc_ptr)
}
} }
/// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`. /// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`.
@ -807,7 +809,7 @@ impl<T: ?Sized> Arc<T> {
#[unstable(feature = "arc_mutate_strong_count", issue = "71983")] #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
pub unsafe fn incr_strong_count(ptr: *const T) { pub unsafe fn incr_strong_count(ptr: *const T) {
// Retain Arc, but don't touch refcount by wrapping in ManuallyDrop // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
let arc = mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)); let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
// Now increase refcount, but don't drop new refcount either // Now increase refcount, but don't drop new refcount either
let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
} }
@ -847,7 +849,7 @@ impl<T: ?Sized> Arc<T> {
#[inline] #[inline]
#[unstable(feature = "arc_mutate_strong_count", issue = "71983")] #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
pub unsafe fn decr_strong_count(ptr: *const T) { pub unsafe fn decr_strong_count(ptr: *const T) {
mem::drop(Arc::from_raw(ptr)); unsafe { mem::drop(Arc::from_raw(ptr)) };
} }
#[inline] #[inline]
@ -865,7 +867,7 @@ impl<T: ?Sized> Arc<T> {
unsafe fn drop_slow(&mut self) { unsafe fn drop_slow(&mut self) {
// Destroy the data at this time, even though we may not free the box // Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around). // allocation itself (there may still be weak pointers lying around).
ptr::drop_in_place(Self::get_mut_unchecked(self)); unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
// Drop the weak ref collectively held by all strong references // Drop the weak ref collectively held by all strong references
drop(Weak { ptr: self.ptr }); drop(Weak { ptr: self.ptr });
@ -917,10 +919,12 @@ impl<T: ?Sized> Arc<T> {
// Initialize the ArcInner // Initialize the ArcInner
let inner = mem_to_arcinner(mem.ptr.as_ptr()); let inner = mem_to_arcinner(mem.ptr.as_ptr());
debug_assert_eq!(Layout::for_value(&*inner), layout); debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); unsafe {
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
}
inner inner
} }
@ -928,9 +932,11 @@ impl<T: ?Sized> Arc<T> {
/// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value. /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> { unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
// Allocate for the `ArcInner<T>` using the given value. // Allocate for the `ArcInner<T>` using the given value.
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| { unsafe {
set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T> Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
}) set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
})
}
} }
fn from_box(v: Box<T>) -> Arc<T> { fn from_box(v: Box<T>) -> Arc<T> {
@ -959,9 +965,11 @@ impl<T: ?Sized> Arc<T> {
impl<T> Arc<[T]> { impl<T> Arc<[T]> {
/// Allocates an `ArcInner<[T]>` with the given length. /// Allocates an `ArcInner<[T]>` with the given length.
unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| { unsafe {
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]> Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
}) ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
})
}
} }
} }
@ -970,7 +978,9 @@ impl<T> Arc<[T]> {
/// For a slice/trait object, this sets the `data` field and leaves the rest /// For a slice/trait object, this sets the `data` field and leaves the rest
/// unchanged. For a sized raw pointer, this simply sets the pointer. /// unchanged. For a sized raw pointer, this simply sets the pointer.
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T { unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); unsafe {
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
}
ptr ptr
} }
@@ -979,11 +989,13 @@ impl<T> Arc<[T]> {
/// ///
/// Unsafe because the caller must either take ownership or bind `T: Copy`. /// Unsafe because the caller must either take ownership or bind `T: Copy`.
unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
let ptr = Self::allocate_for_slice(v.len()); unsafe {
let ptr = Self::allocate_for_slice(v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
Self::from_ptr(ptr) Self::from_ptr(ptr)
}
} }
/// Constructs an `Arc<[T]>` from an iterator known to be of a certain size. /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
@@ -1011,25 +1023,27 @@ impl<T> Arc<[T]> {
} }
} }
let ptr = Self::allocate_for_slice(len); unsafe {
let ptr = Self::allocate_for_slice(len);
let mem = ptr as *mut _ as *mut u8; let mem = ptr as *mut _ as *mut u8;
let layout = Layout::for_value(&*ptr); let layout = Layout::for_value(&*ptr);
// Pointer to first element // Pointer to first element
let elems = &mut (*ptr).data as *mut [T] as *mut T; let elems = &mut (*ptr).data as *mut [T] as *mut T;
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
for (i, item) in iter.enumerate() { for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item); ptr::write(elems.add(i), item);
guard.n_elems += 1; guard.n_elems += 1;
}
// All clear. Forget the guard so it doesn't free the new ArcInner.
mem::forget(guard);
Self::from_ptr(ptr)
} }
// All clear. Forget the guard so it doesn't free the new ArcInner.
mem::forget(guard);
Self::from_ptr(ptr)
} }
} }
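For context, the `Guard` used above is a drop guard: it records how many elements have been written so that, if the iterator panics partway through, the already-initialized elements are dropped and the allocation is freed instead of leaking. A sketch of the same idea over a plain buffer (names like `PartialGuard` and `collect_exact` are illustrative, not liballoc's; it assumes `len > 0` and a non-zero-sized `T`):

use std::alloc::{alloc, dealloc, Layout};
use std::{mem, ptr};

// Drop guard: on unwind, drop whatever was initialized and free the buffer.
struct PartialGuard<T> {
    mem: *mut T,
    layout: Layout,
    n_init: usize,
}

impl<T> Drop for PartialGuard<T> {
    fn drop(&mut self) {
        unsafe {
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.mem, self.n_init));
            dealloc(self.mem as *mut u8, self.layout);
        }
    }
}

fn collect_exact<T>(len: usize, iter: impl Iterator<Item = T>) -> Vec<T> {
    assert!(len > 0 && mem::size_of::<T>() > 0);
    let layout = Layout::array::<T>(len).unwrap();
    unsafe {
        let buf = alloc(layout) as *mut T;
        assert!(!buf.is_null(), "allocation failed");
        let mut guard = PartialGuard { mem: buf, layout, n_init: 0 };
        for (i, item) in iter.take(len).enumerate() {
            ptr::write(buf.add(i), item);
            guard.n_init += 1;
        }
        assert_eq!(guard.n_init, len, "iterator was shorter than promised");
        // Fully initialized: disarm the guard and hand the buffer to Vec.
        mem::forget(guard);
        Vec::from_raw_parts(buf, len, len)
    }
}

fn main() {
    let v = collect_exact(3, (1..).map(|i| i * 10));
    assert_eq!(v, [10, 20, 30]);
}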
@@ -1274,7 +1288,7 @@ impl<T: ?Sized> Arc<T> {
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
// We are careful to *not* create a reference covering the "count" fields, as // We are careful to *not* create a reference covering the "count" fields, as
// this would alias with concurrent access to the reference counts (e.g. by `Weak`). // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
&mut (*this.ptr.as_ptr()).data unsafe { &mut (*this.ptr.as_ptr()).data }
} }
/// Determine whether this is the unique reference (including weak refs) to /// Determine whether this is the unique reference (including weak refs) to
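The aliasing comment above is what makes `get_mut_unchecked` so delicate; its checked counterpart `Arc::get_mut` shows the same interface from stable Rust (small usage sketch):

use std::sync::Arc;

fn main() {
    let mut a = Arc::new(5);
    // Unique reference: mutation is allowed.
    *Arc::get_mut(&mut a).unwrap() += 1;
    let b = Arc::clone(&a);
    // No longer unique, so get_mut refuses where get_mut_unchecked would
    // leave the caller responsible for avoiding data races.
    assert!(Arc::get_mut(&mut a).is_none());
    assert_eq!((*a, *b), (6, 6));
}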
@@ -1551,10 +1565,12 @@ impl<T> Weak<T> {
Self::new() Self::new()
} else { } else {
// See Arc::from_raw for details // See Arc::from_raw for details
let offset = data_offset(ptr); unsafe {
let fake_ptr = ptr as *mut ArcInner<T>; let offset = data_offset(ptr);
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); let fake_ptr = ptr as *mut ArcInner<T>;
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") } let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
}
} }
} }
} }
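As the hunk's comment notes, `Weak::from_raw` reuses the `Arc::from_raw` offset logic: the raw pointer addresses the value itself, and the header address is recomputed from it. A round-trip usage sketch (the `into_raw`/`from_raw` pair on `Weak` was still unstable when this commit landed, but is stable on current compilers):

use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(String::from("hello"));
    let weak: Weak<String> = Arc::downgrade(&strong);
    // The raw pointer addresses the `data` field, not the ArcInner header.
    let raw: *const String = weak.into_raw();
    let weak = unsafe { Weak::from_raw(raw) };
    assert_eq!(*weak.upgrade().expect("strong ref still alive"), "hello");
}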
@@ -2260,7 +2276,7 @@ unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Because it is `?Sized`, it will always be the last field in memory. // Because it is `?Sized`, it will always be the last field in memory.
// Note: This is a detail of the current implementation of the compiler, // Note: This is a detail of the current implementation of the compiler,
// and is not a guaranteed language detail. Do not rely on it outside of std. // and is not a guaranteed language detail. Do not rely on it outside of std.
data_offset_align(align_of_val(&*ptr)) unsafe { data_offset_align(align_of_val(&*ptr)) }
} }
/// Computes the offset of the data field within `ArcInner`. /// Computes the offset of the data field within `ArcInner`.
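Concretely, the offset computed here is the size of the count header rounded up to the value's alignment. A tiny sketch of that arithmetic (assuming the usual 16-byte header of two `usize` counters on a 64-bit target; the real code goes through `Layout::padding_needed_for`):

// Round the header size up to the payload's alignment (align must be a power of two).
fn data_offset(header_size: usize, value_align: usize) -> usize {
    (header_size + value_align - 1) & !(value_align - 1)
}

fn main() {
    assert_eq!(data_offset(16, 8), 16);  // u64 payload: no padding needed
    assert_eq!(data_offset(16, 32), 32); // over-aligned payload: 16 bytes of padding
}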

View file

@@ -60,7 +60,7 @@ impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker { fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
// Increment the reference count of the arc to clone it. // Increment the reference count of the arc to clone it.
unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker { unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
Arc::incr_strong_count(waker as *const W); unsafe { Arc::incr_strong_count(waker as *const W) };
RawWaker::new( RawWaker::new(
waker as *const (), waker as *const (),
&RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>), &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
@@ -69,19 +69,20 @@ fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
// Wake by value, moving the Arc into the Wake::wake function // Wake by value, moving the Arc into the Wake::wake function
unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) { unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
let waker: Arc<W> = Arc::from_raw(waker as *const W); let waker: Arc<W> = unsafe { Arc::from_raw(waker as *const W) };
<W as Wake>::wake(waker); <W as Wake>::wake(waker);
} }
// Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) { unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
let waker: ManuallyDrop<Arc<W>> = ManuallyDrop::new(Arc::from_raw(waker as *const W)); let waker: ManuallyDrop<Arc<W>> =
unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
<W as Wake>::wake_by_ref(&waker); <W as Wake>::wake_by_ref(&waker);
} }
// Decrement the reference count of the Arc on drop // Decrement the reference count of the Arc on drop
unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) { unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
Arc::decr_strong_count(waker as *const W); unsafe { Arc::decr_strong_count(waker as *const W) };
} }
RawWaker::new( RawWaker::new(
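These four vtable entries mirror `Waker`'s clone/wake/wake_by_ref/drop semantics onto the `Arc`'s strong count. The public surface this file backs is the `Wake` trait together with `Waker::from(Arc<W>)` (unstable when this commit landed, stable on current compilers); a minimal sketch:

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::task::{Wake, Waker};

struct Counter(AtomicUsize);

impl Wake for Counter {
    fn wake(self: Arc<Self>) {
        self.0.fetch_add(1, Ordering::SeqCst);
    }
}

fn main() {
    let counter = Arc::new(Counter(AtomicUsize::new(0)));
    // Goes through raw_waker above: the Arc is leaked into the RawWaker data pointer.
    let waker = Waker::from(Arc::clone(&counter));
    waker.wake_by_ref(); // borrows the waker, strong count unchanged afterwards
    waker.wake();        // consumes the waker, releasing its strong reference
    assert_eq!(counter.0.load(Ordering::SeqCst), 2);
}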

View file

@@ -465,7 +465,7 @@ impl<T> Vec<T> {
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> { pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } unsafe { Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } }
} }
/// Returns the number of elements the vector can hold without /// Returns the number of elements the vector can hold without
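A round-trip usage sketch of `from_raw_parts`, in the spirit of its documentation, showing the invariants the new `unsafe` block stands in for:

use std::mem::ManuallyDrop;

fn main() {
    let v = vec![1u32, 2, 3];
    let mut v = ManuallyDrop::new(v); // leak: we take over ownership of the buffer
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
    // Safety: ptr/len/cap describe a live allocation from the global
    // allocator that nothing else will free.
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}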
@@ -1264,10 +1264,10 @@ impl<T> Vec<T> {
/// Appends elements to `Self` from other buffer. /// Appends elements to `Self` from other buffer.
#[inline] #[inline]
unsafe fn append_elements(&mut self, other: *const [T]) { unsafe fn append_elements(&mut self, other: *const [T]) {
let count = (*other).len(); let count = unsafe { (*other).len() };
self.reserve(count); self.reserve(count);
let len = self.len(); let len = self.len();
ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count); unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
self.len += count; self.len += count;
} }
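As far as I can tell, `append_elements` is the raw copy reached from the safe slice-appending APIs, `Vec::append` and `extend_from_slice`, which is how it is normally exercised:

fn main() {
    let mut a = vec![1u8, 2];
    let mut b = vec![3u8, 4];
    a.extend_from_slice(&[5, 6]); // slice path
    a.append(&mut b);             // moves b's elements, leaving b empty
    assert_eq!(a, [1, 2, 5, 6, 3, 4]);
    assert!(b.is_empty());
}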
@@ -2965,15 +2965,16 @@ impl<T> Drain<'_, T> {
/// Fill that range as much as possible with new elements from the `replace_with` iterator. /// Fill that range as much as possible with new elements from the `replace_with` iterator.
/// Returns `true` if we filled the entire range. (`replace_with.next()` didn't return `None`.) /// Returns `true` if we filled the entire range. (`replace_with.next()` didn't return `None`.)
unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool { unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
let vec = self.vec.as_mut(); let vec = unsafe { self.vec.as_mut() };
let range_start = vec.len; let range_start = vec.len;
let range_end = self.tail_start; let range_end = self.tail_start;
let range_slice = let range_slice = unsafe {
slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start); slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
};
for place in range_slice { for place in range_slice {
if let Some(new_item) = replace_with.next() { if let Some(new_item) = replace_with.next() {
ptr::write(place, new_item); unsafe { ptr::write(place, new_item) };
vec.len += 1; vec.len += 1;
} else { } else {
return false; return false;
@@ -2984,14 +2985,16 @@ impl<T> Drain<'_, T> {
/// Makes room for inserting more elements before the tail. /// Makes room for inserting more elements before the tail.
unsafe fn move_tail(&mut self, additional: usize) { unsafe fn move_tail(&mut self, additional: usize) {
let vec = self.vec.as_mut(); let vec = unsafe { self.vec.as_mut() };
let len = self.tail_start + self.tail_len; let len = self.tail_start + self.tail_len;
vec.buf.reserve(len, additional); vec.buf.reserve(len, additional);
let new_tail_start = self.tail_start + additional; let new_tail_start = self.tail_start + additional;
let src = vec.as_ptr().add(self.tail_start); unsafe {
let dst = vec.as_mut_ptr().add(new_tail_start); let src = vec.as_ptr().add(self.tail_start);
ptr::copy(src, dst, self.tail_len); let dst = vec.as_mut_ptr().add(new_tail_start);
ptr::copy(src, dst, self.tail_len);
}
self.tail_start = new_tail_start; self.tail_start = new_tail_start;
} }
} }
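These two helpers back `Vec::splice`, as far as I can tell: `fill` writes replacement elements into the drained gap, and `move_tail` shifts the tail right when the replacement turns out to be longer than the removed range. From the safe side:

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Replace one element with three: the tail (3, 4, 5) must be moved
    // right by two slots, which is what move_tail handles internally.
    let removed: Vec<_> = v.splice(1..2, vec![10, 20, 30]).collect();
    assert_eq!(removed, [2]);
    assert_eq!(v, [1, 10, 20, 30, 3, 4, 5]);
}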