
Merge remote-tracking branch 'upstream/master' into box-alloc

Tim Diekmann 2020-10-25 16:32:28 +01:00
commit 06e4497a04
989 changed files with 13348 additions and 12044 deletions

View file

@ -383,7 +383,7 @@ pub fn handle_alloc_error(layout: Layout) -> ! {
unsafe { oom_impl(layout) }
}
#[cfg(not(any(test, bootstrap)))]
#[cfg(not(any(target_os = "hermit", test, bootstrap)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
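Aside (not part of the diff): a minimal sketch of how the public `handle_alloc_error` entry point named in the hunk above is reached from user code, using only the stable `std::alloc` API; the buffer size is arbitrary and purely illustrative.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

fn main() {
    let layout = Layout::new::<[u8; 1024]>();
    // SAFETY: `layout` has non-zero size.
    let ptr = unsafe { alloc(layout) };
    if ptr.is_null() {
        // Diverges, reporting the layout that could not be satisfied.
        handle_alloc_error(layout);
    }
    // SAFETY: `ptr` was allocated above with this exact layout and is non-null.
    unsafe { dealloc(ptr, layout) };
}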

View file

@ -9,12 +9,17 @@ use core::ops::{Index, RangeBounds};
use core::ptr;
use super::borrow::DormantMutRef;
use super::node::{self, marker, ForceResult::*, Handle, InsertResult::*, NodeRef};
use super::node::{self, marker, ForceResult::*, Handle, NodeRef};
use super::search::{self, SearchResult::*};
use super::unwrap_unchecked;
mod entry;
pub use entry::{Entry, OccupiedEntry, VacantEntry};
use Entry::*;
use UnderflowResult::*;
/// Minimum number of elements in nodes that are not a root.
/// We might temporarily have fewer elements during methods.
pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
/// A map based on a B-Tree.
///
@ -452,69 +457,6 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
}
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
///
/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// A vacant entry.
#[stable(feature = "rust1", since = "1.0.0")]
Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
/// An occupied entry.
#[stable(feature = "rust1", since = "1.0.0")]
Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
}
}
}
/// A view into a vacant entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
key: K,
handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,
// Be invariant in `K` and `V`
_marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
}
/// A view into an occupied entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,
// Be invariant in `K` and `V`
_marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
}
}
// An iterator for merging two sorted sequences into one
struct MergeIter<K, V, I: Iterator<Item = (K, V)>> {
left: Peekable<I>,
@ -1156,13 +1098,13 @@ impl<K: Ord, V> BTreeMap<K, V> {
// Check if right-most child is underfull.
let mut last_edge = internal.last_edge();
let right_child_len = last_edge.reborrow().descend().len();
if right_child_len < node::MIN_LEN {
if right_child_len < MIN_LEN {
// We need to steal.
let mut last_kv = match last_edge.left_kv() {
Ok(left) => left,
Err(_) => unreachable!(),
};
last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
last_kv.bulk_steal_left(MIN_LEN - right_child_len);
last_edge = last_kv.right_edge();
}
@ -1214,40 +1156,8 @@ impl<K: Ord, V> BTreeMap<K, V> {
let mut right = Self::new();
let right_root = Self::ensure_is_owned(&mut right.root);
for _ in 0..left_root.height() {
right_root.push_internal_level();
}
{
let mut left_node = left_root.node_as_mut();
let mut right_node = right_root.node_as_mut();
loop {
let mut split_edge = match search::search_node(left_node, key) {
// key is going to the right tree
Found(handle) => handle.left_edge(),
GoDown(handle) => handle,
};
split_edge.move_suffix(&mut right_node);
match (split_edge.force(), right_node.force()) {
(Internal(edge), Internal(node)) => {
left_node = edge.descend();
right_node = node.first_edge().descend();
}
(Leaf(_), Leaf(_)) => {
break;
}
_ => {
unreachable!();
}
}
}
}
left_root.fix_right_border();
right_root.fix_left_border();
left_root.split_off(right_root, key);
if left_root.height() < right_root.height() {
self.length = left_root.node_as_ref().calc_length();
@ -2310,596 +2220,6 @@ impl<K, V> BTreeMap<K, V> {
}
}
impl<'a, K: Ord, V> Entry<'a, K, V> {
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or_insert(self, default: V) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(default),
}
}
/// Ensures a value is in the entry by inserting the result of the default function if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, String> = BTreeMap::new();
/// let s = "hoho".to_string();
///
/// map.entry("poneyland").or_insert_with(|| s);
///
/// assert_eq!(map["poneyland"], "hoho".to_string());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(default()),
}
}
#[unstable(feature = "or_insert_with_key", issue = "71024")]
/// Ensures a value is in the entry by inserting, if empty, the result of the default function,
/// which takes the key as its argument, and returns a mutable reference to the value in the
/// entry.
///
/// # Examples
///
/// ```
/// #![feature(or_insert_with_key)]
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
///
/// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
///
/// assert_eq!(map["poneyland"], 9);
/// ```
#[inline]
pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => {
let value = default(entry.key());
entry.insert(value)
}
}
}
/// Returns a reference to this entry's key.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
match *self {
Occupied(ref entry) => entry.key(),
Vacant(ref entry) => entry.key(),
}
}
/// Provides in-place mutable access to an occupied entry before any
/// potential inserts into the map.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
///
/// map.entry("poneyland")
/// .and_modify(|e| { *e += 1 })
/// .or_insert(42);
/// assert_eq!(map["poneyland"], 42);
///
/// map.entry("poneyland")
/// .and_modify(|e| { *e += 1 })
/// .or_insert(42);
/// assert_eq!(map["poneyland"], 43);
/// ```
#[stable(feature = "entry_and_modify", since = "1.26.0")]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut V),
{
match self {
Occupied(mut entry) => {
f(entry.get_mut());
Occupied(entry)
}
Vacant(entry) => Vacant(entry),
}
}
}
impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
#[stable(feature = "entry_or_default", since = "1.28.0")]
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
/// map.entry("poneyland").or_default();
///
/// assert_eq!(map["poneyland"], None);
/// ```
pub fn or_default(self) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(Default::default()),
}
}
}
impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
/// Gets a reference to the key that would be used when inserting a value
/// through the VacantEntry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
&self.key
}
/// Take ownership of the key.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
///
/// if let Entry::Vacant(v) = map.entry("poneyland") {
/// v.into_key();
/// }
/// ```
#[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
pub fn into_key(self) -> K {
self.key
}
/// Sets the value of the entry with the `VacantEntry`'s key,
/// and returns a mutable reference to it.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
///
/// if let Entry::Vacant(o) = map.entry("poneyland") {
/// o.insert(37);
/// }
/// assert_eq!(map["poneyland"], 37);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
let out_ptr = match self.handle.insert_recursing(self.key, value) {
(Fit(_), val_ptr) => {
// Safety: We have consumed self.handle and the handle returned.
let map = unsafe { self.dormant_map.awaken() };
map.length += 1;
val_ptr
}
(Split(ins), val_ptr) => {
drop(ins.left);
// Safety: We have consumed self.handle and the reference returned.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap();
root.push_internal_level().push(ins.k, ins.v, ins.right);
map.length += 1;
val_ptr
}
};
// Now that we have finished growing the tree using borrowed references,
// dereference the pointer to a part of it that we picked up along the way.
unsafe { &mut *out_ptr }
}
}
impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
/// Gets a reference to the key in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
self.handle.reborrow().into_kv().0
}
/// Take ownership of the key and value from the map.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// // We delete the entry from the map.
/// o.remove_entry();
/// }
///
/// // If we now try to get the value, it will panic:
/// // println!("{}", map["poneyland"]);
/// ```
#[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
pub fn remove_entry(self) -> (K, V) {
self.remove_kv()
}
/// Gets a reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// assert_eq!(o.get(), &12);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> &V {
self.handle.reborrow().into_kv().1
}
/// Gets a mutable reference to the value in the entry.
///
/// If you need a reference to the `OccupiedEntry` that may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
/// [`into_mut`]: OccupiedEntry::into_mut
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// if let Entry::Occupied(mut o) = map.entry("poneyland") {
/// *o.get_mut() += 10;
/// assert_eq!(*o.get(), 22);
///
/// // We can use the same Entry multiple times.
/// *o.get_mut() += 2;
/// }
/// assert_eq!(map["poneyland"], 24);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut V {
self.handle.kv_mut().1
}
/// Converts the entry into a mutable reference to its value.
///
/// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
///
/// [`get_mut`]: OccupiedEntry::get_mut
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// *o.into_mut() += 10;
/// }
/// assert_eq!(map["poneyland"], 22);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V {
self.handle.into_val_mut()
}
/// Sets the value of the entry with the `OccupiedEntry`'s key,
/// and returns the entry's old value.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(mut o) = map.entry("poneyland") {
/// assert_eq!(o.insert(15), 12);
/// }
/// assert_eq!(map["poneyland"], 15);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, value: V) -> V {
mem::replace(self.get_mut(), value)
}
/// Takes the value of the entry out of the map, and returns it.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// assert_eq!(o.remove(), 12);
/// }
/// // If we try to get "poneyland"'s value, it'll panic:
/// // println!("{}", map["poneyland"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(self) -> V {
self.remove_kv().1
}
// Body of `remove_entry`, separate to keep the above implementations short.
fn remove_kv(self) -> (K, V) {
let mut emptied_internal_root = false;
let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true);
// SAFETY: we consumed the intermediate root borrow, `self.handle`.
let map = unsafe { self.dormant_map.awaken() };
map.length -= 1;
if emptied_internal_root {
let root = map.root.as_mut().unwrap();
root.pop_internal_level();
}
old_kv
}
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
/// Removes a key/value-pair from the map, and returns that pair, as well as
/// the leaf edge corresponding to that former pair.
fn remove_kv_tracking<F: FnOnce()>(
self,
handle_emptied_internal_root: F,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
let (old_kv, mut pos, was_internal) = match self.force() {
Leaf(leaf) => {
let (old_kv, pos) = leaf.remove();
(old_kv, pos, false)
}
Internal(mut internal) => {
// Replace the location freed in the internal node with an
// adjacent KV, and remove that adjacent KV from its leaf.
// Always choose the adjacent KV on the left side because
// it is typically faster to pop an element from the end
// of the KV arrays without needing to shift other elements.
let key_loc = internal.kv_mut().0 as *mut K;
let val_loc = internal.kv_mut().1 as *mut V;
let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok();
let to_remove = unsafe { unwrap_unchecked(to_remove) };
let (kv, pos) = to_remove.remove();
let old_key = unsafe { mem::replace(&mut *key_loc, kv.0) };
let old_val = unsafe { mem::replace(&mut *val_loc, kv.1) };
((old_key, old_val), pos, true)
}
};
// Handle underflow
let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
let mut at_leaf = true;
while cur_node.len() < node::MIN_LEN {
match handle_underfull_node(cur_node) {
AtRoot => break,
Merged(edge, merged_with_left, offset) => {
// If we merged with our right sibling then our tracked
// position has not changed. However if we merged with our
// left sibling then our tracked position is now dangling.
if at_leaf && merged_with_left {
let idx = pos.idx() + offset;
let node = match unsafe { ptr::read(&edge).descend().force() } {
Leaf(leaf) => leaf,
Internal(_) => unreachable!(),
};
pos = unsafe { Handle::new_edge(node, idx) };
}
let parent = edge.into_node();
if parent.len() == 0 {
// The parent that was just emptied must be the root,
// because nodes on a lower level would not have been
// left with a single child.
handle_emptied_internal_root();
break;
} else {
cur_node = parent.forget_type();
at_leaf = false;
}
}
Stole(stole_from_left) => {
// Adjust the tracked position if we stole from a left sibling
if stole_from_left && at_leaf {
// SAFETY: This is safe since we just added an element to our node.
unsafe {
pos.move_next_unchecked();
}
}
break;
}
}
}
// If we deleted from an internal node then we need to compensate for
// the earlier swap and adjust the tracked position to point to the
// next element.
if was_internal {
pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() };
}
(old_kv, pos)
}
}
impl<K, V> node::Root<K, V> {
/// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
fn fix_top(&mut self) {
while self.height() > 0 && self.node_as_ref().len() == 0 {
self.pop_internal_level();
}
}
fn fix_right_border(&mut self) {
self.fix_top();
{
let mut cur_node = self.node_as_mut();
while let Internal(node) = cur_node.force() {
let mut last_kv = node.last_kv();
if last_kv.can_merge() {
cur_node = last_kv.merge().descend();
} else {
let right_len = last_kv.reborrow().right_edge().descend().len();
// `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
if right_len < node::MIN_LEN + 1 {
last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
}
cur_node = last_kv.right_edge().descend();
}
}
}
self.fix_top();
}
/// The symmetric clone of `fix_right_border`.
fn fix_left_border(&mut self) {
self.fix_top();
{
let mut cur_node = self.node_as_mut();
while let Internal(node) = cur_node.force() {
let mut first_kv = node.first_kv();
if first_kv.can_merge() {
cur_node = first_kv.merge().descend();
} else {
let left_len = first_kv.reborrow().left_edge().descend().len();
if left_len < node::MIN_LEN + 1 {
first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
}
cur_node = first_kv.left_edge().descend();
}
}
}
self.fix_top();
}
}
enum UnderflowResult<'a, K, V> {
AtRoot,
Merged(Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>, bool, usize),
Stole(bool),
}
fn handle_underfull_node<'a, K: 'a, V: 'a>(
node: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
) -> UnderflowResult<'_, K, V> {
let parent = match node.ascend() {
Ok(parent) => parent,
Err(_) => return AtRoot,
};
// Prefer the left KV if it exists. Merging with the left side is faster,
// since merging happens towards the left and `node` has fewer elements.
// Stealing from the left side is faster, since we can pop from the end of
// the KV arrays.
let (is_left, mut handle) = match parent.left_kv() {
Ok(left) => (true, left),
Err(parent) => {
let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) };
(false, right)
}
};
if handle.can_merge() {
let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 };
Merged(handle.merge(), is_left, offset)
} else {
if is_left {
handle.steal_left();
} else {
handle.steal_right();
}
Stole(is_left)
}
}
impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
type Item = (K, V);

View file

@ -0,0 +1,475 @@
use core::fmt::{self, Debug};
use core::marker::PhantomData;
use core::mem;
use super::super::borrow::DormantMutRef;
use super::super::node::{marker, Handle, InsertResult::*, NodeRef};
use super::BTreeMap;
use Entry::*;
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
///
/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// A vacant entry.
#[stable(feature = "rust1", since = "1.0.0")]
Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
/// An occupied entry.
#[stable(feature = "rust1", since = "1.0.0")]
Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
}
}
}
/// A view into a vacant entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
pub(super) key: K,
pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,
// Be invariant in `K` and `V`
pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
}
/// A view into an occupied entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,
// Be invariant in `K` and `V`
pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
}
}
impl<'a, K: Ord, V> Entry<'a, K, V> {
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or_insert(self, default: V) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(default),
}
}
/// Ensures a value is in the entry by inserting the result of the default function if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, String> = BTreeMap::new();
/// let s = "hoho".to_string();
///
/// map.entry("poneyland").or_insert_with(|| s);
///
/// assert_eq!(map["poneyland"], "hoho".to_string());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(default()),
}
}
#[unstable(feature = "or_insert_with_key", issue = "71024")]
/// Ensures a value is in the entry by inserting, if empty, the result of the default function,
/// which takes the key as its argument, and returns a mutable reference to the value in the
/// entry.
///
/// # Examples
///
/// ```
/// #![feature(or_insert_with_key)]
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
///
/// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
///
/// assert_eq!(map["poneyland"], 9);
/// ```
#[inline]
pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => {
let value = default(entry.key());
entry.insert(value)
}
}
}
/// Returns a reference to this entry's key.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
match *self {
Occupied(ref entry) => entry.key(),
Vacant(ref entry) => entry.key(),
}
}
/// Provides in-place mutable access to an occupied entry before any
/// potential inserts into the map.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
///
/// map.entry("poneyland")
/// .and_modify(|e| { *e += 1 })
/// .or_insert(42);
/// assert_eq!(map["poneyland"], 42);
///
/// map.entry("poneyland")
/// .and_modify(|e| { *e += 1 })
/// .or_insert(42);
/// assert_eq!(map["poneyland"], 43);
/// ```
#[stable(feature = "entry_and_modify", since = "1.26.0")]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut V),
{
match self {
Occupied(mut entry) => {
f(entry.get_mut());
Occupied(entry)
}
Vacant(entry) => Vacant(entry),
}
}
}
impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
#[stable(feature = "entry_or_default", since = "1.28.0")]
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
/// map.entry("poneyland").or_default();
///
/// assert_eq!(map["poneyland"], None);
/// ```
pub fn or_default(self) -> &'a mut V {
match self {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(Default::default()),
}
}
}
impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
/// Gets a reference to the key that would be used when inserting a value
/// through the VacantEntry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
&self.key
}
/// Take ownership of the key.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
///
/// if let Entry::Vacant(v) = map.entry("poneyland") {
/// v.into_key();
/// }
/// ```
#[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
pub fn into_key(self) -> K {
self.key
}
/// Sets the value of the entry with the `VacantEntry`'s key,
/// and returns a mutable reference to it.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
///
/// if let Entry::Vacant(o) = map.entry("poneyland") {
/// o.insert(37);
/// }
/// assert_eq!(map["poneyland"], 37);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
let out_ptr = match self.handle.insert_recursing(self.key, value) {
(Fit(_), val_ptr) => {
// Safety: We have consumed self.handle and the handle returned.
let map = unsafe { self.dormant_map.awaken() };
map.length += 1;
val_ptr
}
(Split(ins), val_ptr) => {
drop(ins.left);
// Safety: We have consumed self.handle and the reference returned.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap();
root.push_internal_level().push(ins.k, ins.v, ins.right);
map.length += 1;
val_ptr
}
};
// Now that we have finished growing the tree using borrowed references,
// dereference the pointer to a part of it that we picked up along the way.
unsafe { &mut *out_ptr }
}
}
impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
/// Gets a reference to the key in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
self.handle.reborrow().into_kv().0
}
/// Take ownership of the key and value from the map.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// // We delete the entry from the map.
/// o.remove_entry();
/// }
///
/// // If we now try to get the value, it will panic:
/// // println!("{}", map["poneyland"]);
/// ```
#[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
pub fn remove_entry(self) -> (K, V) {
self.remove_kv()
}
/// Gets a reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// assert_eq!(o.get(), &12);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> &V {
self.handle.reborrow().into_kv().1
}
/// Gets a mutable reference to the value in the entry.
///
/// If you need a reference to the `OccupiedEntry` that may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
/// [`into_mut`]: OccupiedEntry::into_mut
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// if let Entry::Occupied(mut o) = map.entry("poneyland") {
/// *o.get_mut() += 10;
/// assert_eq!(*o.get(), 22);
///
/// // We can use the same Entry multiple times.
/// *o.get_mut() += 2;
/// }
/// assert_eq!(map["poneyland"], 24);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut V {
self.handle.kv_mut().1
}
/// Converts the entry into a mutable reference to its value.
///
/// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
///
/// [`get_mut`]: OccupiedEntry::get_mut
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// *o.into_mut() += 10;
/// }
/// assert_eq!(map["poneyland"], 22);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V {
self.handle.into_val_mut()
}
/// Sets the value of the entry with the `OccupiedEntry`'s key,
/// and returns the entry's old value.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(mut o) = map.entry("poneyland") {
/// assert_eq!(o.insert(15), 12);
/// }
/// assert_eq!(map["poneyland"], 15);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, value: V) -> V {
mem::replace(self.get_mut(), value)
}
/// Takes the value of the entry out of the map, and returns it.
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// assert_eq!(o.remove(), 12);
/// }
/// // If we try to get "poneyland"'s value, it'll panic:
/// // println!("{}", map["poneyland"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(self) -> V {
self.remove_kv().1
}
// Body of `remove_entry`, separate to keep the above implementations short.
pub(super) fn remove_kv(self) -> (K, V) {
let mut emptied_internal_root = false;
let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true);
// SAFETY: we consumed the intermediate root borrow, `self.handle`.
let map = unsafe { self.dormant_map.awaken() };
map.length -= 1;
if emptied_internal_root {
let root = map.root.as_mut().unwrap();
root.pop_internal_level();
}
old_kv
}
}
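Aside (not part of the diff): a small combined sketch of the Entry API collected in this new file, matching on both variants at once; the per-method doc examples above each show only one side.

use std::collections::btree_map::Entry;
use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<&str, u32> = BTreeMap::new();
    map.insert("a", 1);
    for &key in ["a", "b"].iter() {
        match map.entry(key) {
            // "a" is already present: bump it in place.
            Entry::Occupied(mut o) => *o.get_mut() += 10,
            // "b" is missing: insert a starting value.
            Entry::Vacant(v) => {
                v.insert(0);
            }
        }
    }
    assert_eq!(map["a"], 11);
    assert_eq!(map["b"], 0);
}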

View file

@ -1,4 +1,4 @@
use super::super::{navigate::Position, node, DeterministicRng};
use super::super::{node, DeterministicRng};
use super::Entry::{Occupied, Vacant};
use super::*;
use crate::boxed::Box;
@ -7,7 +7,7 @@ use crate::rc::Rc;
use crate::string::{String, ToString};
use crate::vec::Vec;
use std::convert::TryFrom;
use std::iter::FromIterator;
use std::iter::{self, FromIterator};
use std::mem;
use std::ops::Bound::{self, Excluded, Included, Unbounded};
use std::ops::RangeBounds;
@ -42,19 +42,6 @@ fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator<Item = &'a mut T>
}
}
struct SeriesChecker<T> {
previous: Option<T>,
}
impl<T: Copy + Debug + Ord> SeriesChecker<T> {
fn is_ascending(&mut self, next: T) {
if let Some(previous) = self.previous {
assert!(previous < next, "{:?} >= {:?}", previous, next);
}
self.previous = Some(next);
}
}
impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
/// Panics if the map (or the code navigating it) is corrupted.
fn check(&self)
@ -63,44 +50,15 @@ impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
{
if let Some(root) = &self.root {
let root_node = root.node_as_ref();
let mut checker = SeriesChecker { previous: None };
let mut internal_length = 0;
let mut internal_kv_count = 0;
let mut leaf_length = 0;
root_node.visit_nodes_in_order(|pos| match pos {
Position::Leaf(node) => {
let is_root = root_node.height() == 0;
let min_len = if is_root { 0 } else { node::MIN_LEN };
assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
for idx in 0..node.len() {
let key = *unsafe { node.key_at(idx) };
checker.is_ascending(key);
}
leaf_length += node.len();
}
Position::Internal(node) => {
let is_root = root_node.height() == node.height();
let min_len = if is_root { 1 } else { node::MIN_LEN };
assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
assert!(root_node.ascend().is_err());
root_node.assert_back_pointers();
for idx in 0..=node.len() {
let edge = unsafe { node::Handle::new_edge(node, idx) };
assert!(edge.descend().ascend().ok().unwrap() == edge);
}
let counted = root_node.assert_ascending();
assert_eq!(self.length, counted);
assert_eq!(self.length, root_node.calc_length());
internal_length += node.len();
}
Position::InternalKV(kv) => {
let key = *kv.into_kv().0;
checker.is_ascending(key);
internal_kv_count += 1;
}
});
assert_eq!(internal_length, internal_kv_count);
assert_eq!(root_node.calc_length(), internal_length + leaf_length);
assert_eq!(self.length, internal_length + leaf_length);
root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
} else {
assert_eq!(self.length, 0);
}
@ -116,34 +74,25 @@ impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
K: Debug,
{
if let Some(root) = self.root.as_ref() {
let mut result = String::new();
let root_node = root.node_as_ref();
root_node.visit_nodes_in_order(|pos| match pos {
Position::Leaf(leaf) => {
let depth = root_node.height();
let indent = " ".repeat(depth);
result += &format!("\n{}", indent);
for idx in 0..leaf.len() {
if idx > 0 {
result += ", ";
}
result += &format!("{:?}", unsafe { leaf.key_at(idx) });
}
}
Position::Internal(_) => {}
Position::InternalKV(kv) => {
let depth = root_node.height() - kv.into_node().height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, kv.into_kv().0);
}
});
result
root.node_as_ref().dump_keys()
} else {
String::from("not yet allocated")
}
}
}
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
pub fn assert_min_len(self, min_len: usize) {
assert!(self.len() >= min_len, "{} < {}", self.len(), min_len);
if let node::ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
edge.descend().assert_min_len(MIN_LEN);
}
}
}
}
// Test our value of MIN_INSERTS_HEIGHT_2. It may change according to the
// implementation of insertion, but it's best to be aware of when it does.
#[test]
@ -170,7 +119,6 @@ fn test_levels() {
let last_key = *map.last_key_value().unwrap().0;
map.insert(last_key + 1, ());
}
println!("{}", map.dump_keys());
map.check();
// Structure:
// - 1 element in internal root node with 2 children
@ -372,7 +320,7 @@ fn test_iter_rev() {
fn do_test_iter_mut_mutation<T>(size: usize)
where
T: Copy + Debug + Ord + TryFrom<usize>,
<T as std::convert::TryFrom<usize>>::Error: std::fmt::Debug,
<T as TryFrom<usize>>::Error: Debug,
{
let zero = T::try_from(0).unwrap();
let mut map: BTreeMap<T, T> = (0..size).map(|i| (T::try_from(i).unwrap(), zero)).collect();
@ -857,7 +805,7 @@ mod test_drain_filter {
fn consuming_nothing() {
let pairs = (0..3).map(|i| (i, i));
let mut map: BTreeMap<_, _> = pairs.collect();
assert!(map.drain_filter(|_, _| false).eq(std::iter::empty()));
assert!(map.drain_filter(|_, _| false).eq(iter::empty()));
map.check();
}
@ -878,7 +826,7 @@ mod test_drain_filter {
*v += 6;
false
})
.eq(std::iter::empty())
.eq(iter::empty())
);
assert!(map.keys().copied().eq(0..3));
assert!(map.values().copied().eq(6..9));

View file

@ -2,8 +2,10 @@ mod borrow;
pub mod map;
mod navigate;
mod node;
mod remove;
mod search;
pub mod set;
mod split;
#[doc(hidden)]
trait Recover<Q: ?Sized> {

View file

@ -38,8 +38,8 @@ use crate::alloc::{AllocRef, Global, Layout};
use crate::boxed::Box;
const B: usize = 6;
pub const MIN_LEN: usize = B - 1;
pub const CAPACITY: usize = 2 * B - 1;
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
const KV_IDX_CENTER: usize = B - 1;
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
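Aside (not part of the diff): the arithmetic behind these constants as a standalone sketch, with the values fixed by `B = 6` above. A full node holds CAPACITY = 11 key/value pairs; splitting it sends one pair to the parent and leaves both halves with at least MIN_LEN_AFTER_SPLIT = 5 pairs, which is also the `map::MIN_LEN` underflow threshold.

const B: usize = 6;
const CAPACITY: usize = 2 * B - 1; // 11 key/value slots per node
const MIN_LEN_AFTER_SPLIT: usize = B - 1; // 5, re-exported as map::MIN_LEN

fn main() {
    // One pair moves up to the parent during a split of a full node...
    let remaining = CAPACITY - 1;
    // ...and the rest is divided so that neither half underflows.
    assert!(remaining / 2 >= MIN_LEN_AFTER_SPLIT);
    assert!(remaining - remaining / 2 >= MIN_LEN_AFTER_SPLIT);
}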
@ -87,7 +87,6 @@ impl<K, V> LeafNode<K, V> {
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
// gdb_providers.py uses this field name for introspection.
data: LeafNode<K, V>,
/// The pointers to the children of this node. `len + 1` of these are considered
@ -128,10 +127,6 @@ impl<K, V> BoxedNode<K, V> {
BoxedNode { ptr: Unique::from(&mut Box::leak(node).data) }
}
unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
}
fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
NonNull::from(self.ptr)
}
@ -199,7 +194,7 @@ impl<K, V> Root<K, V> {
/// and is the opposite of `pop_internal_level`.
pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
let mut new_node = Box::new(unsafe { InternalNode::new() });
new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
new_node.edges[0].write(unsafe { ptr::read(&mut self.node) });
self.node = BoxedNode::from_internal(new_node);
self.height += 1;
@ -225,8 +220,8 @@ impl<K, V> Root<K, V> {
let top = self.node.ptr;
let internal_node = unsafe { self.internal_node_as_mut() };
self.node = unsafe { BoxedNode::from_ptr(internal_node.first_edge().descend().node) };
let mut internal_node = unsafe { self.internal_node_as_mut() };
self.node = unsafe { internal_node.as_internal_mut().edges[0].assume_init_read() };
self.height -= 1;
self.node_as_mut().as_leaf_mut().parent = None;
@ -616,7 +611,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
/// Adds a key/value pair to the beginning of the node.
fn push_front(&mut self, key: K, val: V) {
debug_assert!(self.len() < CAPACITY);
assert!(self.len() < CAPACITY);
unsafe {
slice_insert(self.keys_mut(), 0, key);
@ -669,14 +664,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
unsafe {
slice_insert(self.keys_mut(), 0, key);
slice_insert(self.vals_mut(), 0, val);
slice_insert(
slice::from_raw_parts_mut(
MaybeUninit::slice_as_mut_ptr(&mut self.as_internal_mut().edges),
self.len() + 1,
),
0,
edge.node,
);
slice_insert(self.edges_mut(), 0, edge.node);
}
self.as_leaf_mut().len += 1;
@ -926,24 +914,6 @@ fn splitpoint(edge_idx: usize) -> (usize, InsertionPlace) {
}
}
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::Edge> {
/// Helps implementations of `insert_fit` for a particular `NodeType`,
/// by taking care of leaf data.
/// Inserts a new key/value pair between the key/value pairs to the right and left of
/// this edge. This method assumes that there is enough space in the node for the new
/// pair to fit.
fn leafy_insert_fit(&mut self, key: K, val: V) {
debug_assert!(self.node.len() < CAPACITY);
unsafe {
slice_insert(self.node.keys_mut(), self.idx, key);
slice_insert(self.node.vals_mut(), self.idx, val);
self.node.as_leaf_mut().len += 1;
}
}
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
/// Inserts a new key/value pair between the key/value pairs to the right and left of
/// this edge. This method assumes that there is enough space in the node for the new
@ -951,8 +921,15 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
///
/// The returned pointer points to the inserted value.
fn insert_fit(&mut self, key: K, val: V) -> *mut V {
self.leafy_insert_fit(key, val);
unsafe { self.node.val_mut_at(self.idx) }
debug_assert!(self.node.len() < CAPACITY);
unsafe {
slice_insert(self.node.keys_mut(), self.idx, key);
slice_insert(self.node.vals_mut(), self.idx, val);
self.node.as_leaf_mut().len += 1;
self.node.val_mut_at(self.idx)
}
}
}
@ -1001,11 +978,14 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// between this edge and the key/value pair to the right of this edge. This method assumes
/// that there is enough space in the node for the new pair to fit.
fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
debug_assert!(self.node.len() < CAPACITY);
debug_assert!(edge.height == self.node.height - 1);
unsafe {
slice_insert(self.node.keys_mut(), self.idx, key);
slice_insert(self.node.vals_mut(), self.idx, val);
slice_insert(self.node.edges_mut(), self.idx + 1, edge.node);
self.leafy_insert_fit(key, val);
self.node.as_leaf_mut().len += 1;
self.node.correct_childrens_parent_links((self.idx + 1)..=self.node.len());
}
@ -1136,15 +1116,21 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
}
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
/// Helps implementations of `split` for a particular `NodeType`,
/// by calculating the length of the new node.
fn split_new_node_len(&self) -> usize {
debug_assert!(self.idx < self.node.len());
self.node.len() - self.idx - 1
}
/// Helps implementations of `split` for a particular `NodeType`,
/// by taking care of leaf data.
fn leafy_split(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V, usize) {
fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
let new_len = self.split_new_node_len();
unsafe {
let k = ptr::read(self.node.key_at(self.idx));
let v = ptr::read(self.node.val_at(self.idx));
let new_len = self.node.len() - self.idx - 1;
ptr::copy_nonoverlapping(
self.node.key_at(self.idx + 1),
MaybeUninit::slice_as_mut_ptr(&mut new_node.keys),
@ -1158,7 +1144,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
self.node.as_leaf_mut().len = self.idx as u16;
new_node.len = new_len as u16;
(k, v, new_len)
(k, v)
}
}
}
@ -1166,7 +1152,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
/// Splits the underlying node into three parts:
///
/// - The node is truncated to only contain the key/value pairs to the right of
/// - The node is truncated to only contain the key/value pairs to the left of
/// this handle.
/// - The key and value pointed to by this handle are extracted.
/// - All the key/value pairs to the right of this handle are put into a newly
@ -1175,9 +1161,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
unsafe {
let mut new_node = Box::new(LeafNode::new());
let (k, v, _) = self.leafy_split(&mut new_node);
let (k, v) = self.split_leaf_data(&mut new_node);
(self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
let right = Root { node: BoxedNode::from_leaf(new_node), height: 0 };
(self.node, k, v, right)
}
}
@ -1211,29 +1198,28 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// Splits the underlying node into three parts:
///
/// - The node is truncated to only contain the edges and key/value pairs to the
/// right of this handle.
/// left of this handle.
/// - The key and value pointed to by this handle are extracted.
/// - All the edges and key/value pairs to the right of this handle are put into
/// a newly allocated node.
pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
unsafe {
let mut new_node = Box::new(InternalNode::new());
let (k, v, new_len) = self.leafy_split(&mut new_node.data);
let height = self.node.height;
let old_node = &*self.node.as_internal_ptr();
// Move edges out before reducing length:
let new_len = self.split_new_node_len();
ptr::copy_nonoverlapping(
old_node.edges.as_ptr().add(self.idx + 1),
new_node.edges.as_mut_ptr(),
self.node.edge_at(self.idx + 1),
MaybeUninit::slice_as_mut_ptr(&mut new_node.edges),
new_len + 1,
);
let (k, v) = self.split_leaf_data(&mut new_node.data);
let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
let height = self.node.height;
let mut right = Root { node: BoxedNode::from_internal(new_node), height };
new_root.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);
right.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);
(self.node, k, v, new_root)
(self.node, k, v, right)
}
}

View file

@ -1,6 +1,87 @@
use super::super::navigate;
use super::*;
use crate::fmt::Debug;
use crate::string::String;
use core::cmp::Ordering::*;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
/// Asserts that the back pointer in each reachable node points to its parent.
pub fn assert_back_pointers(self) {
if let ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
let child = edge.descend();
assert!(child.ascend().ok() == Some(edge));
child.assert_back_pointers();
}
}
}
/// Asserts that the keys are in strictly ascending order.
/// Returns how many keys it encountered.
pub fn assert_ascending(self) -> usize
where
K: Copy + Debug + Ord,
{
struct SeriesChecker<T> {
num_seen: usize,
previous: Option<T>,
}
impl<T: Copy + Debug + Ord> SeriesChecker<T> {
fn is_ascending(&mut self, next: T) {
if let Some(previous) = self.previous {
assert!(previous < next, "{:?} >= {:?}", previous, next);
}
self.previous = Some(next);
self.num_seen += 1;
}
}
let mut checker = SeriesChecker { num_seen: 0, previous: None };
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(node) => {
for idx in 0..node.len() {
let key = *unsafe { node.key_at(idx) };
checker.is_ascending(key);
}
}
navigate::Position::InternalKV(kv) => {
let key = *kv.into_kv().0;
checker.is_ascending(key);
}
navigate::Position::Internal(_) => {}
});
checker.num_seen
}
pub fn dump_keys(self) -> String
where
K: Debug,
{
let mut result = String::new();
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(leaf) => {
let depth = self.height();
let indent = " ".repeat(depth);
result += &format!("\n{}", indent);
for idx in 0..leaf.len() {
if idx > 0 {
result += ", ";
}
result += &format!("{:?}", unsafe { leaf.key_at(idx) });
}
}
navigate::Position::Internal(_) => {}
navigate::Position::InternalKV(kv) => {
let depth = self.height() - kv.into_node().height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, kv.into_kv().0);
}
});
result
}
}
#[test]
fn test_splitpoint() {
for idx in 0..=CAPACITY {
@ -19,8 +100,8 @@ fn test_splitpoint() {
right_len += 1;
}
}
assert!(left_len >= MIN_LEN);
assert!(right_len >= MIN_LEN);
assert!(left_len >= MIN_LEN_AFTER_SPLIT);
assert!(right_len >= MIN_LEN_AFTER_SPLIT);
assert!(left_len + right_len == CAPACITY);
}
}

View file

@ -0,0 +1,133 @@
use super::map::MIN_LEN;
use super::node::{marker, ForceResult, Handle, NodeRef};
use super::unwrap_unchecked;
use core::mem;
use core::ptr;
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
/// Removes a key/value-pair from the map, and returns that pair, as well as
/// the leaf edge corresponding to that former pair.
pub fn remove_kv_tracking<F: FnOnce()>(
self,
handle_emptied_internal_root: F,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
let (old_kv, mut pos, was_internal) = match self.force() {
ForceResult::Leaf(leaf) => {
let (old_kv, pos) = leaf.remove();
(old_kv, pos, false)
}
ForceResult::Internal(mut internal) => {
// Replace the location freed in the internal node with an
// adjacent KV, and remove that adjacent KV from its leaf.
// Always choose the adjacent KV on the left side because
// it is typically faster to pop an element from the end
// of the KV arrays without needing to shift other elements.
let key_loc = internal.kv_mut().0 as *mut K;
let val_loc = internal.kv_mut().1 as *mut V;
let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok();
let to_remove = unsafe { unwrap_unchecked(to_remove) };
let (kv, pos) = to_remove.remove();
let old_key = unsafe { mem::replace(&mut *key_loc, kv.0) };
let old_val = unsafe { mem::replace(&mut *val_loc, kv.1) };
((old_key, old_val), pos, true)
}
};
// Handle underflow
let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
let mut at_leaf = true;
while cur_node.len() < MIN_LEN {
match handle_underfull_node(cur_node) {
UnderflowResult::AtRoot => break,
UnderflowResult::Merged(edge, merged_with_left, offset) => {
// If we merged with our right sibling then our tracked
// position has not changed. However if we merged with our
// left sibling then our tracked position is now dangling.
if at_leaf && merged_with_left {
let idx = pos.idx() + offset;
let node = match unsafe { ptr::read(&edge).descend().force() } {
ForceResult::Leaf(leaf) => leaf,
ForceResult::Internal(_) => unreachable!(),
};
pos = unsafe { Handle::new_edge(node, idx) };
}
let parent = edge.into_node();
if parent.len() == 0 {
// The parent that was just emptied must be the root,
// because nodes on a lower level would not have been
// left with a single child.
handle_emptied_internal_root();
break;
} else {
cur_node = parent.forget_type();
at_leaf = false;
}
}
UnderflowResult::Stole(stole_from_left) => {
// Adjust the tracked position if we stole from a left sibling
if stole_from_left && at_leaf {
// SAFETY: This is safe since we just added an element to our node.
unsafe {
pos.move_next_unchecked();
}
}
break;
}
}
}
// If we deleted from an internal node then we need to compensate for
// the earlier swap and adjust the tracked position to point to the
// next element.
if was_internal {
pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() };
}
(old_kv, pos)
}
}
enum UnderflowResult<'a, K, V> {
AtRoot,
Merged(Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>, bool, usize),
Stole(bool),
}
fn handle_underfull_node<'a, K: 'a, V: 'a>(
node: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
) -> UnderflowResult<'_, K, V> {
let parent = match node.ascend() {
Ok(parent) => parent,
Err(_) => return UnderflowResult::AtRoot,
};
// Prefer the left KV if it exists. Merging with the left side is faster,
// since merging happens towards the left and `node` has fewer elements.
// Stealing from the left side is faster, since we can pop from the end of
// the KV arrays.
let (is_left, mut handle) = match parent.left_kv() {
Ok(left) => (true, left),
Err(parent) => {
let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) };
(false, right)
}
};
if handle.can_merge() {
let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 };
UnderflowResult::Merged(handle.merge(), is_left, offset)
} else {
if is_left {
handle.steal_left();
} else {
handle.steal_right();
}
UnderflowResult::Stole(is_left)
}
}
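Aside (not part of the diff): a minimal sketch of the public removal path that drives `remove_kv_tracking` and `handle_underfull_node` above. The sizes are arbitrary, but large enough that the tree has internal nodes, so the deletions exercise the merge/steal rebalancing.

use std::collections::BTreeMap;

fn main() {
    // More entries than one leaf can hold, so internal nodes exist.
    let mut map: BTreeMap<u32, u32> = (0..200).map(|i| (i, i)).collect();
    for i in 0..150 {
        // Each remove may leave a node underfull, triggering a steal or merge.
        assert_eq!(map.remove(&i), Some(i));
    }
    assert_eq!(map.len(), 50);
    assert!(map.keys().copied().eq(150..200));
}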

View file

@ -0,0 +1,106 @@
use super::map::MIN_LEN;
use super::node::{ForceResult::*, Root};
use super::search::{search_node, SearchResult::*};
use core::borrow::Borrow;
impl<K, V> Root<K, V> {
pub fn split_off<Q: ?Sized + Ord>(&mut self, right_root: &mut Self, key: &Q)
where
K: Borrow<Q>,
{
debug_assert!(right_root.height() == 0);
debug_assert!(right_root.node_as_ref().len() == 0);
let left_root = self;
for _ in 0..left_root.height() {
right_root.push_internal_level();
}
{
let mut left_node = left_root.node_as_mut();
let mut right_node = right_root.node_as_mut();
loop {
let mut split_edge = match search_node(left_node, key) {
// key is going to the right tree
Found(handle) => handle.left_edge(),
GoDown(handle) => handle,
};
split_edge.move_suffix(&mut right_node);
match (split_edge.force(), right_node.force()) {
(Internal(edge), Internal(node)) => {
left_node = edge.descend();
right_node = node.first_edge().descend();
}
(Leaf(_), Leaf(_)) => {
break;
}
_ => unreachable!(),
}
}
}
left_root.fix_right_border();
right_root.fix_left_border();
}
/// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
fn fix_top(&mut self) {
while self.height() > 0 && self.node_as_ref().len() == 0 {
self.pop_internal_level();
}
}
fn fix_right_border(&mut self) {
self.fix_top();
{
let mut cur_node = self.node_as_mut();
while let Internal(node) = cur_node.force() {
let mut last_kv = node.last_kv();
if last_kv.can_merge() {
cur_node = last_kv.merge().descend();
} else {
let right_len = last_kv.reborrow().right_edge().descend().len();
// `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
if right_len < MIN_LEN + 1 {
last_kv.bulk_steal_left(MIN_LEN + 1 - right_len);
}
cur_node = last_kv.right_edge().descend();
}
}
}
self.fix_top();
}
/// The symmetric clone of `fix_right_border`.
fn fix_left_border(&mut self) {
self.fix_top();
{
let mut cur_node = self.node_as_mut();
while let Internal(node) = cur_node.force() {
let mut first_kv = node.first_kv();
if first_kv.can_merge() {
cur_node = first_kv.merge().descend();
} else {
let left_len = first_kv.reborrow().left_edge().descend().len();
// `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
if left_len < MIN_LEN + 1 {
first_kv.bulk_steal_right(MIN_LEN + 1 - left_len);
}
cur_node = first_kv.left_edge().descend();
}
}
}
self.fix_top();
}
}
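Aside (not part of the diff): the observable behavior of the `split_off` routine in this file, shown through the stable `BTreeMap::split_off` API as a small usage sketch.

use std::collections::BTreeMap;

fn main() {
    let mut left: BTreeMap<i32, char> = (0..6).map(|i| (i, 'x')).collect();
    // Keys >= 3 (the split key itself included) move into the returned map.
    let right = left.split_off(&3);
    assert!(left.keys().copied().eq(0..3));
    assert!(right.keys().copied().eq(3..6));
}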

View file

@ -1102,7 +1102,7 @@ impl<T> VecDeque<T> {
where
R: RangeBounds<usize>,
{
let Range { start, end } = slice::check_range(self.len(), range);
let Range { start, end } = range.assert_len(self.len());
let tail = self.wrap_add(self.tail, start);
let head = self.wrap_add(self.tail, end);
(tail, head)
@ -2181,7 +2181,7 @@ impl<T> VecDeque<T> {
///
/// This method does not allocate and does not change the order of the
/// inserted elements. As it returns a mutable slice, this can be used to
/// sort or binary search a deque.
/// sort a deque.
///
/// Once the internal storage is contiguous, the [`as_slices`] and
/// [`as_mut_slices`] methods will return the entire contents of the
@ -2430,6 +2430,143 @@ impl<T> VecDeque<T> {
self.wrap_copy(self.tail, self.head, k);
}
}
/// Binary searches this sorted `VecDeque` for a given element.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
/// one of the matches could be returned. If the value is not found then
/// [`Result::Err`] is returned, containing the index where a matching
/// element could be inserted while maintaining sorted order.
///
/// # Examples
///
/// Looks up a series of four elements. The first is found, with a
/// uniquely determined position; the second and third are not
/// found; the fourth could match any position in `[1, 4]`.
///
/// ```
/// #![feature(vecdeque_binary_search)]
/// use std::collections::VecDeque;
///
/// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into();
///
/// assert_eq!(deque.binary_search(&13), Ok(9));
/// assert_eq!(deque.binary_search(&4), Err(7));
/// assert_eq!(deque.binary_search(&100), Err(13));
/// let r = deque.binary_search(&1);
/// assert!(matches!(r, Ok(1..=4)));
/// ```
///
/// If you want to insert an item to a sorted `VecDeque`, while maintaining
/// sort order:
///
/// ```
/// #![feature(vecdeque_binary_search)]
/// use std::collections::VecDeque;
///
/// let mut deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into();
/// let num = 42;
/// let idx = deque.binary_search(&num).unwrap_or_else(|x| x);
/// deque.insert(idx, num);
/// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
/// ```
#[unstable(feature = "vecdeque_binary_search", issue = "78021")]
#[inline]
pub fn binary_search(&self, x: &T) -> Result<usize, usize>
where
T: Ord,
{
self.binary_search_by(|e| e.cmp(x))
}
/// Binary searches this sorted `VecDeque` with a comparator function.
///
/// The comparator function should implement an order consistent
/// with the sort order of the underlying `VecDeque`, returning an
/// order code that indicates whether its argument is `Less`,
/// `Equal` or `Greater` than the desired target.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
/// one of the matches could be returned. If the value is not found then
/// [`Result::Err`] is returned, containing the index where a matching
/// element could be inserted while maintaining sorted order.
///
/// # Examples
///
/// Looks up a series of four elements. The first is found, with a
/// uniquely determined position; the second and third are not
/// found; the fourth could match any position in `[1, 4]`.
///
/// ```
/// #![feature(vecdeque_binary_search)]
/// use std::collections::VecDeque;
///
/// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into();
///
/// assert_eq!(deque.binary_search_by(|x| x.cmp(&13)), Ok(9));
/// assert_eq!(deque.binary_search_by(|x| x.cmp(&4)), Err(7));
/// assert_eq!(deque.binary_search_by(|x| x.cmp(&100)), Err(13));
/// let r = deque.binary_search_by(|x| x.cmp(&1));
/// assert!(matches!(r, Ok(1..=4)));
/// ```
#[unstable(feature = "vecdeque_binary_search", issue = "78021")]
pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
where
F: FnMut(&'a T) -> Ordering,
{
let (front, back) = self.as_slices();
if let Some(Ordering::Less | Ordering::Equal) = back.first().map(|elem| f(elem)) {
back.binary_search_by(f).map(|idx| idx + front.len()).map_err(|idx| idx + front.len())
} else {
front.binary_search_by(f)
}
}
/// Binary searches this sorted `VecDeque` with a key extraction function.
///
/// Assumes that the `VecDeque` is sorted by the key, for instance with
/// [`make_contiguous().sort_by_key()`](#method.make_contiguous) using the same
/// key extraction function.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
/// one of the matches could be returned. If the value is not found then
/// [`Result::Err`] is returned, containing the index where a matching
/// element could be inserted while maintaining sorted order.
///
/// # Examples
///
/// Looks up a series of four elements in a slice of pairs sorted by
/// their second elements. The first is found, with a uniquely
/// determined position; the second and third are not found; the
/// fourth could match any position in `[1, 4]`.
///
/// ```
/// #![feature(vecdeque_binary_search)]
/// use std::collections::VecDeque;
///
/// let deque: VecDeque<_> = vec![(0, 0), (2, 1), (4, 1), (5, 1),
/// (3, 1), (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
/// (1, 21), (2, 34), (4, 55)].into();
///
/// assert_eq!(deque.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
/// assert_eq!(deque.binary_search_by_key(&4, |&(a,b)| b), Err(7));
/// assert_eq!(deque.binary_search_by_key(&100, |&(a,b)| b), Err(13));
/// let r = deque.binary_search_by_key(&1, |&(a,b)| b);
/// assert!(matches!(r, Ok(1..=4)));
/// ```
#[unstable(feature = "vecdeque_binary_search", issue = "78021")]
#[inline]
pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
where
F: FnMut(&'a T) -> B,
B: Ord,
{
self.binary_search_by(|k| f(k).cmp(b))
}
}
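
The `binary_search_by` added above relies on a sorted `VecDeque` being at most two sorted slices: if the target is not smaller than the first element of the back slice, it can only be in the back slice; otherwise it is in the front slice. A standalone sketch of the same idea over two plain slices, for illustration only (this is not the code above):

use std::cmp::Ordering;

/// Binary search across a logically concatenated pair of sorted slices,
/// returning an index into the concatenation, like the `VecDeque` methods.
fn binary_search_two<T: Ord>(front: &[T], back: &[T], x: &T) -> Result<usize, usize> {
    // If the target is >= the first element of `back`, it must be in `back`.
    if back.first().map_or(false, |b| x >= b) {
        back.binary_search(x)
            .map(|i| i + front.len())
            .map_err(|i| i + front.len())
    } else {
        front.binary_search(x)
    }
}

fn main() {
    // A deque whose contents wrap around the ring buffer:
    // front = [3, 5, 8], back = [13, 21, 34].
    let (front, back) = ([3, 5, 8], [13, 21, 34]);
    assert_eq!(binary_search_two(&front, &back, &13), Ok(3));
    assert_eq!(binary_search_two(&front, &back, &4), Err(1));
    assert_eq!(binary_search_two(&front, &back, &100), Err(6));
}

Searching the back slice first and offsetting the result by `front.len()` is what lets the method report indices as if the deque were a single contiguous slice.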
impl<T: Clone> VecDeque<T> {

View file

@ -72,6 +72,7 @@
#![allow(explicit_outlives_requirements)]
#![allow(incomplete_features)]
#![deny(unsafe_op_in_unsafe_fn)]
#![cfg_attr(not(bootstrap), feature(rustc_allow_const_fn_unstable))]
#![cfg_attr(not(test), feature(generator_trait))]
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, feature(new_uninit))]
@ -114,11 +115,12 @@
#![feature(or_patterns)]
#![feature(pattern)]
#![feature(ptr_internals)]
#![feature(range_bounds_assert_len)]
#![feature(raw_ref_op)]
#![feature(rustc_attrs)]
#![feature(receiver_trait)]
#![feature(renamed_spin_loop)]
#![feature(min_specialization)]
#![feature(slice_check_range)]
#![feature(slice_ptr_get)]
#![feature(slice_ptr_len)]
#![feature(staged_api)]

View file

@ -116,7 +116,8 @@ impl<T> RawVec<T, Global> {
impl<T, A: AllocRef> RawVec<T, A> {
/// Like `new`, but parameterized over the choice of allocator for
/// the returned `RawVec`.
#[allow_internal_unstable(const_fn)]
#[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))]
#[cfg_attr(bootstrap, allow_internal_unstable(const_fn))]
pub const fn new_in(alloc: A) -> Self {
// `cap: 0` means "unallocated". zero-sized types are ignored.
Self { ptr: Unique::dangling(), cap: 0, alloc }
@ -259,7 +260,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
/// Ensures that the buffer contains at least enough space to hold `len +
/// additional` elements. If it doesn't already have enough capacity, will
/// reallocate enough space plus comfortable slack space to get amortized
/// `O(1)` behavior. Will limit this behavior if it would needlessly cause
/// *O*(1) behavior. Will limit this behavior if it would needlessly cause
/// itself to panic.
///
/// If `len` exceeds `self.capacity()`, this may fail to actually allocate

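The amortized *O*(1) claim in the doc comment above comes from growing by a multiplicative factor. The real `RawVec` growth policy is an implementation detail; the sketch below only demonstrates why at-least-doubling keeps the total copy work proportional to the number of pushes, with illustrative numbers.

/// Toy growth policy: grow to at least the required capacity, at least doubling.
fn grow_amortized(cap: usize, needed: usize) -> usize {
    needed.max(cap * 2).max(4)
}

fn main() {
    let (mut cap, mut copies) = (0usize, 0usize);
    for len in 0..1_000 {
        if len == cap {
            copies += len; // a reallocation copies all existing elements
            cap = grow_amortized(cap, len + 1);
        }
    }
    // Total copy work stays proportional to the number of pushes.
    assert!(copies < 2 * 1_000);
}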
View file

@ -91,8 +91,6 @@ use crate::borrow::ToOwned;
use crate::boxed::Box;
use crate::vec::Vec;
#[unstable(feature = "slice_check_range", issue = "76393")]
pub use core::slice::check_range;
#[unstable(feature = "array_chunks", issue = "74985")]
pub use core::slice::ArrayChunks;
#[unstable(feature = "array_chunks", issue = "74985")]
@ -169,7 +167,7 @@ mod hack {
impl<T> [T] {
/// Sorts the slice.
///
/// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case.
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case.
///
/// When applicable, unstable sorting is preferred because it is generally faster than stable
/// sorting and it doesn't allocate auxiliary memory.
@ -204,7 +202,7 @@ impl<T> [T] {
/// Sorts the slice with a comparator function.
///
/// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case.
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case.
///
/// The comparator function must define a total ordering for the elements in the slice. If
/// the ordering is not total, the order of the elements is unspecified. An order is a
@ -258,8 +256,8 @@ impl<T> [T] {
/// Sorts the slice with a key extraction function.
///
/// This sort is stable (i.e., does not reorder equal elements) and `O(m * n * log(n))`
/// worst-case, where the key function is `O(m)`.
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* \* log(*n*))
/// worst-case, where the key function is *O*(*m*).
///
/// For expensive key functions (e.g. functions that are not simple property accesses or
/// basic operations), [`sort_by_cached_key`](#method.sort_by_cached_key) is likely to be
@ -301,8 +299,8 @@ impl<T> [T] {
///
/// During sorting, the key function is called only once per element.
///
/// This sort is stable (i.e., does not reorder equal elements) and `O(m * n + n * log(n))`
/// worst-case, where the key function is `O(m)`.
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* + *n* \* log(*n*))
/// worst-case, where the key function is *O*(*m*).
///
/// For simple key functions (e.g., functions that are property accesses or
/// basic operations), [`sort_by_key`](#method.sort_by_key) is likely to be
@ -946,7 +944,7 @@ where
/// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
/// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
///
/// The invariants ensure that the total running time is `O(n * log(n))` worst-case.
/// The invariants ensure that the total running time is *O*(*n* \* log(*n*)) worst-case.
fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
where
F: FnMut(&T, &T) -> bool,

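The stable-sort docs touched above in this file differ mainly in how often the key function runs; `sort_by_cached_key` (a stable API) evaluates it once per element, which pays off for expensive keys. A short usage example, not part of this diff:

fn main() {
    let mut words = vec!["bbbb", "a", "ccc", "dd"];

    // The key closure runs once per element and the keys are cached, so an
    // expensive key (here just `len`, for illustration) is not recomputed
    // during the O(n * log(n)) comparisons.
    words.sort_by_cached_key(|w| w.len());
    assert_eq!(words, ["a", "dd", "ccc", "bbbb"]);
}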
View file

@ -1,8 +1,8 @@
//! A UTF-8 encoded, growable string.
//! A UTF-8–encoded, growable string.
//!
//! This module contains the [`String`] type, a trait for converting
//! [`ToString`]s, and several error types that may result from working with
//! [`String`]s.
//! This module contains the [`String`] type, the [`ToString`] trait for
//! converting to strings, and several error types that may result from
//! working with [`String`]s.
//!
//! # Examples
//!
@ -49,7 +49,6 @@ use core::iter::{FromIterator, FusedIterator};
use core::ops::Bound::{Excluded, Included, Unbounded};
use core::ops::{self, Add, AddAssign, Index, IndexMut, Range, RangeBounds};
use core::ptr;
use core::slice;
use core::str::{lossy, pattern::Pattern};
use crate::borrow::{Cow, ToOwned};
@ -58,7 +57,7 @@ use crate::collections::TryReserveError;
use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error};
use crate::vec::Vec;
/// A UTF-8 encoded, growable string.
/// A UTF-8–encoded, growable string.
///
/// The `String` type is the most common string type that has ownership over the
/// contents of the string. It has a close relationship with its borrowed
@ -566,7 +565,7 @@ impl String {
Cow::Owned(res)
}
/// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`]
/// Decode a UTF-16–encoded vector `v` into a `String`, returning [`Err`]
/// if `v` contains any invalid data.
///
/// # Examples
@ -600,7 +599,7 @@ impl String {
Ok(ret)
}
/// Decode a UTF-16 encoded slice `v` into a `String`, replacing
/// Decode a UTF-16–encoded slice `v` into a `String`, replacing
/// invalid data with [the replacement character (`U+FFFD`)][U+FFFD].
///
/// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`],
@ -1507,14 +1506,14 @@ impl String {
// of the vector version. The data is just plain bytes.
// Because the range removal happens in Drop, if the Drain iterator is leaked,
// the removal will not happen.
let Range { start, end } = slice::check_range(self.len(), range);
let Range { start, end } = range.assert_len(self.len());
assert!(self.is_char_boundary(start));
assert!(self.is_char_boundary(end));
// Take out two simultaneous borrows. The &mut String won't be accessed
// until iteration is over, in Drop.
let self_ptr = self as *mut _;
// SAFETY: `check_range` and `is_char_boundary` do the appropriate bounds checks.
// SAFETY: `assert_len` and `is_char_boundary` do the appropriate bounds checks.
let chars_iter = unsafe { self.get_unchecked(start..end) }.chars();
Drain { start, end, iter: chars_iter, string: self_ptr }
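
The bounds handling above (`assert_len` plus the two `is_char_boundary` asserts) is what keeps the public, stable `String::drain` sound. A small usage example, not part of this diff, showing the boundary requirement:

fn main() {
    let mut s = String::from("hello, world");

    // Drain removes the chosen byte range and yields its chars.
    let drained: String = s.drain(..7).collect();
    assert_eq!(drained, "hello, ");
    assert_eq!(s, "world");

    // Ranges must fall on char boundaries; e.g. draining `..2` from "héllo"
    // would panic because byte 2 is inside the two-byte 'é'.
}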
@ -2192,15 +2191,15 @@ pub trait ToString {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized> ToString for T {
// A common guideline is to not inline generic functions. However,
// remove `#[inline]` from this method causes non-negligible regression.
// See <https://github.com/rust-lang/rust/pull/74852> as last attempt try to remove it.
// removing `#[inline]` from this method causes non-negligible regressions.
// See <https://github.com/rust-lang/rust/pull/74852>, the last attempt
// to try to remove it.
#[inline]
default fn to_string(&self) -> String {
use fmt::Write;
let mut buf = String::new();
buf.write_fmt(format_args!("{}", self))
.expect("a Display implementation returned an error unexpectedly");
buf.shrink_to_fit();
buf
}
}
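
The blanket impl above is why types normally implement `Display` rather than `ToString` directly; a short example (not part of this diff) of getting `to_string` for free:

use std::fmt;

struct Celsius(f64);

impl fmt::Display for Celsius {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}°C", self.0)
    }
}

fn main() {
    // `to_string` comes from the blanket `impl<T: fmt::Display + ?Sized> ToString for T`.
    assert_eq!(Celsius(21.5).to_string(), "21.5°C");
}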

View file

@ -10,6 +10,7 @@ use core::cmp::Ordering;
use core::convert::{From, TryFrom};
use core::fmt;
use core::hash::{Hash, Hasher};
use core::hint;
use core::intrinsics::abort;
use core::iter;
use core::marker::{PhantomData, Unpin, Unsize};
@ -764,6 +765,7 @@ impl<T: ?Sized> Arc<T> {
loop {
// check if the weak counter is currently "locked"; if so, spin.
if cur == usize::MAX {
hint::spin_loop();
cur = this.inner().weak.load(Relaxed);
continue;
}
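
`hint::spin_loop` (the call added above) only emits the CPU's pause/yield hint. A minimal, self-contained sketch of the general spin-wait pattern it is meant for, unrelated to the `Arc` internals and shown purely for illustration:

use std::hint;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    let ready = Arc::new(AtomicBool::new(false));
    let flag = Arc::clone(&ready);

    let worker = thread::spawn(move || {
        // Spin until another thread flips the flag, hinting to the CPU on
        // every iteration that this is a busy-wait loop.
        while !flag.load(Ordering::Acquire) {
            hint::spin_loop();
        }
        42
    });

    ready.store(true, Ordering::Release);
    assert_eq!(worker.join().unwrap(), 42);
}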

View file

@ -259,7 +259,7 @@ use crate::raw_vec::RawVec;
/// `Vec` does not guarantee any particular growth strategy when reallocating
/// when full, nor when [`reserve`] is called. The current strategy is basic
/// and it may prove desirable to use a non-constant growth factor. Whatever
/// strategy is used will of course guarantee `O(1)` amortized [`push`].
/// strategy is used will of course guarantee *O*(1) amortized [`push`].
///
/// `vec![x; n]`, `vec![a, b, c, d]`, and
/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec`
@ -1314,7 +1314,7 @@ impl<T> Vec<T> {
// the hole, and the vector length is restored to the new length.
//
let len = self.len();
let Range { start, end } = slice::check_range(len, range);
let Range { start, end } = range.assert_len(len);
unsafe {
// set self.vec length's to start, to be safe in case Drain is leaked
@ -1603,50 +1603,6 @@ impl<T: Clone> Vec<T> {
}
}
impl<T: Default> Vec<T> {
/// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
///
/// If `new_len` is greater than `len`, the `Vec` is extended by the
/// difference, with each additional slot filled with [`Default::default()`].
/// If `new_len` is less than `len`, the `Vec` is simply truncated.
///
/// This method uses [`Default`] to create new values on every push. If
/// you'd rather [`Clone`] a given value, use [`resize`].
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// #![feature(vec_resize_default)]
///
/// let mut vec = vec![1, 2, 3];
/// vec.resize_default(5);
/// assert_eq!(vec, [1, 2, 3, 0, 0]);
///
/// let mut vec = vec![1, 2, 3, 4];
/// vec.resize_default(2);
/// assert_eq!(vec, [1, 2]);
/// ```
///
/// [`resize`]: Vec::resize
#[unstable(feature = "vec_resize_default", issue = "41758")]
#[rustc_deprecated(
reason = "This is moving towards being removed in favor \
of `.resize_with(Default::default)`. If you disagree, please comment \
in the tracking issue.",
since = "1.33.0"
)]
pub fn resize_default(&mut self, new_len: usize) {
let len = self.len();
if new_len > len {
self.extend_with(new_len - len, ExtendDefault);
} else {
self.truncate(new_len);
}
}
}
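
The deprecated `resize_default` is dropped in this diff; its suggested replacement, `resize_with(Default::default)`, is a stable API. A short usage example, not part of this diff:

fn main() {
    let mut vec = vec![1, 2, 3];

    // Grow using `Default::default()` for the new slots, as the removed
    // `resize_default` used to do.
    vec.resize_with(5, Default::default);
    assert_eq!(vec, [1, 2, 3, 0, 0]);

    // Shrinking simply truncates.
    vec.resize_with(2, Default::default);
    assert_eq!(vec, [1, 2]);
}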
// This code generalizes `extend_with_{element,default}`.
trait ExtendWith<T> {
fn next(&mut self) -> T;