Merge branch 'master' into box-alloc
commit 955b37b305
895 changed files with 16039 additions and 11283 deletions
@@ -19,8 +19,9 @@ mod tests;

extern "Rust" {
// These are the magic symbols to call the global allocator. rustc generates
// them from the `#[global_allocator]` attribute if there is one, or uses the
// default implementations in libstd (`__rdl_alloc` etc in `src/libstd/alloc.rs`)
// them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
// (the code expanding that attribute macro generates those functions), or to call
// the default implementations in libstd (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
// otherwise.
#[rustc_allocator]
#[rustc_allocator_nounwind]

@@ -31,8 +32,6 @@ extern "Rust" {
fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
#[rustc_allocator_nounwind]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
#[rustc_allocator_nounwind]
fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}

/// The global memory allocator.

@@ -42,7 +41,7 @@ extern "Rust" {
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](index.html#functions).
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
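The hunk above revises the comment describing the `__rust_alloc` family of shims that rustc wires up from the `#[global_allocator]` attribute. As a hedged illustration of the user-facing side of that mechanism (the `TracingAlloc` type and `main` below are invented for the example, not part of this diff):

```rust
use std::alloc::{GlobalAlloc, Layout, System};

// Illustrative allocator; it simply forwards to the system allocator.
struct TracingAlloc;

unsafe impl GlobalAlloc for TracingAlloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

// This attribute is what makes rustc emit `__rg_alloc` etc. and point the
// `__rust_alloc` shims at them instead of the `__rdl_*` defaults.
#[global_allocator]
static GLOBAL: TracingAlloc = TracingAlloc;

fn main() {
    // Every heap allocation in the program now goes through `TracingAlloc`.
    let v = vec![1, 2, 3];
    assert_eq!(v.len(), 3);
}
```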

@@ -334,6 +333,16 @@ pub(crate) unsafe fn box_free<T: ?Sized, A: AllocRef>(ptr: Unique<T>, alloc: A)
}
}

// # Allocation error handler

extern "Rust" {
// This is the magic symbol to call the global alloc error handler. rustc generates
// it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
// default implementations below (`__rdl_oom`) otherwise.
#[rustc_allocator_nounwind]
fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}

/// Abort on memory allocation error or failure.
///
/// Callers of memory allocation APIs wishing to abort computation

@@ -378,7 +387,7 @@ pub fn handle_alloc_error(layout: Layout) -> ! {
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __default_lib_allocator {
pub mod __alloc_error_handler {
use crate::alloc::Layout;

// called via generated `__rust_alloc_error_handler`
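The hunks above move `__rust_alloc_error_handler` into its own extern block; `handle_alloc_error` is the stable entry point that routes through it. A minimal sketch of a caller on the raw allocation path (the `allocate_buffer` helper is hypothetical):

```rust
use std::alloc::{alloc, handle_alloc_error, Layout};

// Hypothetical helper: allocate `len` bytes or abort via the alloc error handler.
fn allocate_buffer(len: usize) -> *mut u8 {
    assert!(len > 0, "zero-sized allocations are not valid here");
    let layout = Layout::array::<u8>(len).expect("layout overflow");
    // SAFETY: `layout` has non-zero size because `len > 0`.
    let ptr = unsafe { alloc(layout) };
    if ptr.is_null() {
        // Dispatches to `__rust_alloc_error_handler`: either the user's
        // `#[alloc_error_handler]` (`__rg_oom`) or the default `__rdl_oom`.
        handle_alloc_error(layout);
    }
    ptr
}

fn main() {
    let ptr = allocate_buffer(64);
    // SAFETY: `ptr` was just allocated with this exact layout.
    unsafe { std::alloc::dealloc(ptr, Layout::array::<u8>(64).unwrap()) };
}
```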

@@ -1080,7 +1080,7 @@ impl From<Cow<'_, str>> for Box<str> {

#[stable(feature = "boxed_str_conv", since = "1.19.0")]
impl<A: AllocRef> From<Box<str, A>> for Box<[u8], A> {
/// Converts a `Box<str>>` into a `Box<[u8]>`
/// Converts a `Box<str>` into a `Box<[u8]>`
///
/// This conversion does not allocate on the heap and happens in place.
///
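A small usage sketch for the `From<Box<str, A>> for Box<[u8], A>` impl adjusted above; with the default global allocator this is the long-stable `Box<str>` to `Box<[u8]>` conversion, and no reallocation takes place:

```rust
fn main() {
    let s: Box<str> = "hello".into();
    // Reuses the same allocation; only the type of the pointee changes.
    let bytes: Box<[u8]> = s.into();
    assert_eq!(&*bytes, "hello".as_bytes());
}
```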

@@ -1783,6 +1783,10 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {

/// Implementation of a typical `DrainFilter::size_hint` method.
pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
// In most of the btree iterators, `self.length` is the number of elements
// yet to be visited. Here, it includes elements that were visited and that
// the predicate decided not to drain. Making this upper bound more accurate
// requires maintaining an extra field and is not worth while.
(0, Some(*self.length))
}
}
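The `size_hint` added above keeps a lower bound of zero because elements the predicate retains still count toward `self.length`. A hedged sketch of what that looks like from the map's iterator, assuming a nightly toolchain with the then-unstable `btree_drain_filter` feature (the API has since been renamed):

```rust
#![feature(btree_drain_filter)]
use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<i32, i32> = (0..10).map(|i| (i, i * i)).collect();
    let iter = map.drain_filter(|k, _v| k % 2 == 0);
    // Upper bound is the full remaining length, lower bound is zero,
    // because the predicate may keep any number of the remaining elements.
    assert_eq!(iter.size_hint(), (0, Some(10)));
}
```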

@@ -170,6 +170,22 @@ impl<K, V> Root<K, V> {
NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
}

/// Borrows and returns a mutable reference to the leaf node owned by the root.
/// # Safety
/// The root node is a leaf.
unsafe fn leaf_node_as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Leaf> {
debug_assert!(self.height == 0);
NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
}

/// Borrows and returns a mutable reference to the internal node owned by the root.
/// # Safety
/// The root node is not a leaf.
unsafe fn internal_node_as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
debug_assert!(self.height > 0);
NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
}

pub fn node_as_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, marker::LeafOrInternal> {
NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
}

@@ -188,14 +204,11 @@ impl<K, V> Root<K, V> {
self.node = BoxedNode::from_internal(new_node);
self.height += 1;

let mut ret =
NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData };

unsafe {
let mut ret = self.internal_node_as_mut();
ret.reborrow_mut().first_edge().correct_parent_link();
ret
}

ret
}

/// Removes the internal root node, using its first child as the new root node.

@@ -212,11 +225,8 @@ impl<K, V> Root<K, V> {

let top = self.node.ptr;

self.node = unsafe {
BoxedNode::from_ptr(
self.node_as_mut().cast_unchecked::<marker::Internal>().first_edge().descend().node,
)
};
let internal_node = unsafe { self.internal_node_as_mut() };
self.node = unsafe { BoxedNode::from_ptr(internal_node.first_edge().descend().node) };
self.height -= 1;
self.node_as_mut().as_leaf_mut().parent = None;

@@ -247,8 +257,13 @@ impl<K, V> Root<K, V> {
/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
/// `NodeRef` could be pointing to either type of node.
pub struct NodeRef<BorrowType, K, V, Type> {
/// The number of levels below the node.
/// The number of levels below the node, a property of the node that cannot be
/// entirely described by `Type` and that the node does not store itself either.
/// Unconstrained if `Type` is `LeafOrInternal`, must be zero if `Type` is `Leaf`,
/// and must be non-zero if `Type` is `Internal`.
height: usize,
/// The pointer to the leaf or internal node. The definition of `InternalNode`
/// ensures that the pointer is valid either way.
node: NonNull<LeafNode<K, V>>,
_marker: PhantomData<(BorrowType, Type)>,
}
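The new doc comment above spells out the invariant tying `height` to the `Type` parameter. A standalone sketch (not the real btree code; `FakeNodeRef` and its marker types are invented for illustration) of how such an out-of-band invariant is typically checked when narrowing the type:

```rust
use std::marker::PhantomData;

struct Leaf;
struct Internal;
struct LeafOrInternal;

struct FakeNodeRef<Type> {
    // 0 if `Type` is `Leaf`, > 0 if `Internal`, unconstrained for `LeafOrInternal`.
    height: usize,
    _marker: PhantomData<Type>,
}

impl FakeNodeRef<LeafOrInternal> {
    // Narrow the erased type by inspecting the runtime `height`.
    fn force(self) -> Result<FakeNodeRef<Leaf>, FakeNodeRef<Internal>> {
        if self.height == 0 {
            Ok(FakeNodeRef { height: 0, _marker: PhantomData })
        } else {
            Err(FakeNodeRef { height: self.height, _marker: PhantomData })
        }
    }
}

fn main() {
    let root = FakeNodeRef::<LeafOrInternal> { height: 2, _marker: PhantomData };
    assert!(root.force().is_err()); // height 2 means an internal node
}
```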

@@ -305,8 +320,8 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
unsafe { usize::from((*self.as_leaf_ptr()).len) }
}

/// Returns the height of this node in the whole tree. Zero height denotes the
/// leaf level.
/// Returns the height of this node with respect to the leaf level. Zero height means the
/// node is a leaf itself.
pub fn height(&self) -> usize {
self.height
}

@@ -443,9 +458,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
}

impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
/// Unsafely asserts to the compiler some static information about whether this
/// node is a `Leaf` or an `Internal`.
unsafe fn cast_unchecked<NewType>(self) -> NodeRef<marker::Mut<'a>, K, V, NewType> {
/// Unsafely asserts to the compiler the static information that this node is an `Internal`.
unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
debug_assert!(self.height > 0);
NodeRef { height: self.height, node: self.node, _marker: PhantomData }
}

@@ -574,9 +589,11 @@ impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
// to avoid aliasing with outstanding references to other elements,
// in particular, those returned to the caller in earlier iterations.
let leaf = self.node.as_ptr();
let keys = unsafe { &raw const (*leaf).keys };
let vals = unsafe { &raw mut (*leaf).vals };
// We must coerce to unsized array pointers because of Rust issue #74679.
let keys: *const [_] = unsafe { &raw const (*leaf).keys };
let vals: *mut [_] = unsafe { &raw mut (*leaf).vals };
let keys: *const [_] = keys;
let vals: *mut [_] = vals;
// SAFETY: The keys and values of a node must always be initialized up to length.
let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };

@@ -807,11 +824,25 @@ impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, mar
}
}

impl<BorrowType, K, V, NodeType> NodeRef<BorrowType, K, V, NodeType> {
/// Could be a public implementation of PartialEq, but only used in this module.
fn eq(&self, other: &Self) -> bool {
let Self { node, height, _marker: _ } = self;
if *node == other.node {
debug_assert_eq!(*height, other.height);
true
} else {
false
}
}
}

impl<BorrowType, K, V, NodeType, HandleType> PartialEq
for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
fn eq(&self, other: &Self) -> bool {
self.node.node == other.node.node && self.idx == other.idx
let Self { node, idx, _marker: _ } = self;
node.eq(&other.node) && *idx == other.idx
}
}
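The rewritten `eq` above destructures `self` exhaustively instead of reading fields ad hoc, so adding a field to `Handle` later fails to compile here rather than being silently ignored in the comparison. A tiny sketch of the same idiom on an invented type:

```rust
struct Slot {
    index: usize,
    generation: u32,
}

impl PartialEq for Slot {
    fn eq(&self, other: &Self) -> bool {
        // Exhaustive destructuring: a new field in `Slot` makes this a
        // compile error until the comparison is updated.
        let Self { index, generation } = self;
        *index == other.index && *generation == other.generation
    }
}

fn main() {
    assert!(Slot { index: 1, generation: 7 } == Slot { index: 1, generation: 7 });
}
```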

@@ -819,7 +850,8 @@ impl<BorrowType, K, V, NodeType, HandleType> PartialOrd
for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
if self.node.node == other.node.node { Some(self.idx.cmp(&other.idx)) } else { None }
let Self { node, idx, _marker: _ } = self;
if node.eq(&other.node) { Some(idx.cmp(&other.idx)) } else { None }
}
}

@@ -943,10 +975,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
Handle::new_edge(left.reborrow_mut(), insert_idx)
},
InsertionPlace::Right(insert_idx) => unsafe {
Handle::new_edge(
right.node_as_mut().cast_unchecked::<marker::Leaf>(),
insert_idx,
)
Handle::new_edge(right.leaf_node_as_mut(), insert_idx)
},
};
let val_ptr = insertion_edge.insert_fit(key, val);

@@ -1006,10 +1035,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
Handle::new_edge(left.reborrow_mut(), insert_idx)
},
InsertionPlace::Right(insert_idx) => unsafe {
Handle::new_edge(
right.node_as_mut().cast_unchecked::<marker::Internal>(),
insert_idx,
)
Handle::new_edge(right.internal_node_as_mut(), insert_idx)
},
};
insertion_edge.insert_fit(key, val, edge);

@@ -1205,7 +1231,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,

let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };

new_root.node_as_mut().cast_unchecked().correct_childrens_parent_links(0..=new_len);
new_root.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);

(self.node, k, v, new_root)
}

@@ -1258,8 +1284,8 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
if self.node.height > 1 {
// SAFETY: the height of the nodes being merged is one below the height
// of the node of this edge, thus above zero, so they are internal.
let mut left_node = left_node.cast_unchecked::<marker::Internal>();
let right_node = right_node.cast_unchecked::<marker::Internal>();
let mut left_node = left_node.cast_to_internal_unchecked();
let right_node = right_node.cast_to_internal_unchecked();
ptr::copy_nonoverlapping(
right_node.edge_at(0),
left_node.edges_mut().as_mut_ptr().add(left_len + 1),

@@ -1,4 +1,5 @@
use super::*;
use core::cmp::Ordering::*;

#[test]
fn test_splitpoint() {

@@ -24,6 +25,38 @@ fn test_splitpoint() {
}
}

#[test]
fn test_partial_cmp_eq() {
let mut root1: Root<i32, ()> = Root::new_leaf();
let mut leaf1 = unsafe { root1.leaf_node_as_mut() };
leaf1.push(1, ());
root1.push_internal_level();
let root2: Root<i32, ()> = Root::new_leaf();

let leaf_edge_1a = root1.node_as_ref().first_leaf_edge().forget_node_type();
let leaf_edge_1b = root1.node_as_ref().last_leaf_edge().forget_node_type();
let top_edge_1 = root1.node_as_ref().first_edge();
let top_edge_2 = root2.node_as_ref().first_edge();

assert!(leaf_edge_1a == leaf_edge_1a);
assert!(leaf_edge_1a != leaf_edge_1b);
assert!(leaf_edge_1a != top_edge_1);
assert!(leaf_edge_1a != top_edge_2);
assert!(top_edge_1 == top_edge_1);
assert!(top_edge_1 != top_edge_2);

assert_eq!(leaf_edge_1a.partial_cmp(&leaf_edge_1a), Some(Equal));
assert_eq!(leaf_edge_1a.partial_cmp(&leaf_edge_1b), Some(Less));
assert_eq!(leaf_edge_1a.partial_cmp(&top_edge_1), None);
assert_eq!(leaf_edge_1a.partial_cmp(&top_edge_2), None);
assert_eq!(top_edge_1.partial_cmp(&top_edge_1), Some(Equal));
assert_eq!(top_edge_1.partial_cmp(&top_edge_2), None);

root1.pop_internal_level();
unsafe { root1.into_ref().deallocate_and_ascend() };
unsafe { root2.into_ref().deallocate_and_ascend() };
}

#[test]
#[cfg(target_arch = "x86_64")]
fn test_sizes() {

@@ -1476,7 +1476,8 @@ impl<T> Vec<T> {
/// `'a`. If the type has only static references, or none at all, then this
/// may be chosen to be `'static`.
///
/// This function is similar to the `leak` function on `Box`.
/// This function is similar to the [`leak`][Box::leak] function on [`Box`]
/// except that there is no way to recover the leaked memory.
///
/// This function is mainly useful for data that lives for the remainder of
/// the program's life. Dropping the returned reference will cause a memory
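For reference, a short usage sketch of the `Vec::leak` behavior documented above (on current toolchains `Vec::leak` is stable; around the time of this diff it may still have required the unstable `vec_leak` feature):

```rust
fn main() {
    let v = vec![1, 2, 3];
    // The buffer is intentionally never freed, so the returned slice can
    // outlive any scope, here with a `'static` lifetime.
    let leaked: &'static mut [i32] = v.leak();
    leaked[0] = 7;
    assert_eq!(leaked[0], 7);
    assert_eq!(leaked.len(), 3);
}
```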

@@ -2591,6 +2592,8 @@ __impl_slice_eq1! { [] Vec<A>, &[B], #[stable(feature = "rust1", since = "1.0.0"
__impl_slice_eq1! { [] Vec<A>, &mut [B], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] &[A], Vec<B>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [] &mut [A], Vec<B>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [] Vec<A>, [B], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [] [A], Vec<B>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [] Cow<'_, [A]>, Vec<B> where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [A]>, &[B] where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [A]>, &mut [B] where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
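The two lines stamped `1.48.0` above are the newly added impls comparing `Vec<A>` with unsized slices `[B]`. A brief illustration, assuming a toolchain where those impls exist (1.48 or later):

```rust
fn main() {
    let v = vec![1, 2, 3];
    let s: &[i32] = &[1, 2, 3];

    // These directions already existed: Vec<A> == &[B] since 1.0,
    // and &[A] == Vec<B> since 1.46.
    assert!(v == s);
    assert!(s == v);

    // Enabled by the `partialeq_vec_for_slice` impls added above:
    // Vec<A> == [B] and [A] == Vec<B> on unsized slices.
    assert!(v == *s);
    assert!(*s == v);
}
```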