
BTreeMap: Add alloc param

Jacob Hughes 2022-03-10 04:55:24 -05:00
parent 872503d918
commit dc5951a6e5
11 changed files with 676 additions and 340 deletions
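
At a high level, this commit threads an `A: Allocator = Global` type parameter through `BTreeMap`, its entry types, and its iterators, storing the allocator in a `ManuallyDrop<A>` field so it can be moved out when the tree is consumed. A minimal sketch of the resulting public surface (nightly-only; `new_in` is gated behind the unstable `allocator_api`/`btreemap_alloc` features, as the diff below shows):

#![feature(allocator_api, btreemap_alloc)]

use std::alloc::Global;
use std::collections::BTreeMap;

fn main() {
    // `BTreeMap::new()` keeps its stable signature and defaults `A` to `Global`.
    let mut default_map: BTreeMap<i32, &str> = BTreeMap::new();
    default_map.insert(1, "a");

    // `new_in` chooses the allocator explicitly; any `A: Allocator` works.
    let mut alloc_map = BTreeMap::new_in(Global);
    alloc_map.insert(2, "b");
    assert_eq!(alloc_map.get(&2), Some(&"b"));
}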

library/alloc/src/collections/btree/append.rs

@@ -1,5 +1,6 @@
use super::merge_iter::MergeIterInner;
use super::node::{self, Root};
use core::alloc::Allocator;
use core::iter::FusedIterator;
impl<K, V> Root<K, V> {
@@ -14,8 +15,13 @@ impl<K, V> Root<K, V> {
/// a `BTreeMap`, both iterators should produce keys in strictly ascending
/// order, each greater than all keys in the tree, including any keys
/// already in the tree upon entry.
pub fn append_from_sorted_iters<I>(&mut self, left: I, right: I, length: &mut usize)
where
pub fn append_from_sorted_iters<I, A: Allocator>(
&mut self,
left: I,
right: I,
length: &mut usize,
alloc: &A,
) where
K: Ord,
I: Iterator<Item = (K, V)> + FusedIterator,
{
@@ -23,13 +29,13 @@ impl<K, V> Root<K, V> {
let iter = MergeIter(MergeIterInner::new(left, right));
// Meanwhile, we build a tree from the sorted sequence in linear time.
self.bulk_push(iter, length)
self.bulk_push(iter, length, alloc)
}
/// Pushes all key-value pairs to the end of the tree, incrementing a
/// `length` variable along the way. The latter makes it easier for the
/// caller to avoid a leak when the iterator panics.
pub fn bulk_push<I>(&mut self, iter: I, length: &mut usize)
pub fn bulk_push<I, A: Allocator>(&mut self, iter: I, length: &mut usize, alloc: &A)
where
I: Iterator<Item = (K, V)>,
{
@@ -58,7 +64,7 @@ impl<K, V> Root<K, V> {
}
Err(_) => {
// We are at the top, create a new root node and push there.
open_node = self.push_internal_level();
open_node = self.push_internal_level(alloc);
break;
}
}
@@ -66,9 +72,9 @@ impl<K, V> Root<K, V> {
// Push key-value pair and new right subtree.
let tree_height = open_node.height() - 1;
let mut right_tree = Root::new();
let mut right_tree = Root::new(alloc);
for _ in 0..tree_height {
right_tree.push_internal_level();
right_tree.push_internal_level(alloc);
}
open_node.push(key, value, right_tree);
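
The right-spine construction above (a fresh `Root::new(alloc)` grown with `push_internal_level(alloc)` to the required height) is internal API, but it is what ultimately backs `BTreeMap::append`. A user-level illustration of the behavior on the stable API, where the allocator threading is invisible:

use std::collections::BTreeMap;

fn main() {
    let mut a = BTreeMap::from([(1, "one"), (3, "three")]);
    let mut b = BTreeMap::from([(2, "two"), (4, "four")]);

    // `append` drains `b`: internally both maps become sorted iterators that
    // are merged and bulk-pushed into a rebuilt tree, as in the code above.
    a.append(&mut b);

    assert!(b.is_empty());
    assert_eq!(a.keys().copied().collect::<Vec<_>>(), [1, 2, 3, 4]);
}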

library/alloc/src/collections/btree/fix.rs

@@ -1,13 +1,15 @@
use super::map::MIN_LEN;
use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root};
use core::alloc::Allocator;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
/// Stocks up a possibly underfull node by merging with or stealing from a
/// sibling. If successful but at the cost of shrinking the parent node,
/// returns that shrunk parent node. Returns an `Err` if the node is
/// an empty root.
fn fix_node_through_parent(
fn fix_node_through_parent<A: Allocator>(
self,
alloc: &A,
) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
let len = self.len();
if len >= MIN_LEN {
@@ -16,7 +18,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
match self.choose_parent_kv() {
Ok(Left(mut left_parent_kv)) => {
if left_parent_kv.can_merge() {
let parent = left_parent_kv.merge_tracking_parent();
let parent = left_parent_kv.merge_tracking_parent(alloc);
Ok(Some(parent))
} else {
left_parent_kv.bulk_steal_left(MIN_LEN - len);
@@ -25,7 +27,7 @@
}
Ok(Right(mut right_parent_kv)) => {
if right_parent_kv.can_merge() {
let parent = right_parent_kv.merge_tracking_parent();
let parent = right_parent_kv.merge_tracking_parent(alloc);
Ok(Some(parent))
} else {
right_parent_kv.bulk_steal_right(MIN_LEN - len);
@@ -52,9 +54,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
///
/// This method does not expect ancestors to already be underfull upon entry
/// and panics if it encounters an empty ancestor.
pub fn fix_node_and_affected_ancestors(mut self) -> bool {
pub fn fix_node_and_affected_ancestors<A: Allocator>(mut self, alloc: &A) -> bool {
loop {
match self.fix_node_through_parent() {
match self.fix_node_through_parent(alloc) {
Ok(Some(parent)) => self = parent.forget_type(),
Ok(None) => return true,
Err(_) => return false,
@@ -65,29 +67,29 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
impl<K, V> Root<K, V> {
/// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
pub fn fix_top(&mut self) {
pub fn fix_top<A: Allocator>(&mut self, alloc: &A) {
while self.height() > 0 && self.len() == 0 {
self.pop_internal_level();
self.pop_internal_level(alloc);
}
}
/// Stocks up or merges away any underfull nodes on the right border of the
/// tree. The other nodes, those that are not the root nor a rightmost edge,
/// must already have at least MIN_LEN elements.
pub fn fix_right_border(&mut self) {
self.fix_top();
pub fn fix_right_border<A: Allocator>(&mut self, alloc: &A) {
self.fix_top(alloc);
if self.len() > 0 {
self.borrow_mut().last_kv().fix_right_border_of_right_edge();
self.fix_top();
self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc);
self.fix_top(alloc);
}
}
/// The symmetric clone of `fix_right_border`.
pub fn fix_left_border(&mut self) {
self.fix_top();
pub fn fix_left_border<A: Allocator>(&mut self, alloc: &A) {
self.fix_top(alloc);
if self.len() > 0 {
self.borrow_mut().first_kv().fix_left_border_of_left_edge();
self.fix_top();
self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc);
self.fix_top(alloc);
}
}
@@ -113,16 +115,16 @@ impl<K, V> Root<K, V> {
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
fn fix_left_border_of_left_edge(mut self) {
fn fix_left_border_of_left_edge<A: Allocator>(mut self, alloc: &A) {
while let Internal(internal_kv) = self.force() {
self = internal_kv.fix_left_child().first_kv();
self = internal_kv.fix_left_child(alloc).first_kv();
debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
}
}
fn fix_right_border_of_right_edge(mut self) {
fn fix_right_border_of_right_edge<A: Allocator>(mut self, alloc: &A) {
while let Internal(internal_kv) = self.force() {
self = internal_kv.fix_right_child().last_kv();
self = internal_kv.fix_right_child(alloc).last_kv();
debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
}
}
@@ -133,12 +135,15 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// provisions an extra element to allow merging its children in turn
/// without becoming underfull.
/// Returns the left child.
fn fix_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
fn fix_left_child<A: Allocator>(
self,
alloc: &A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
let mut internal_kv = self.consider_for_balancing();
let left_len = internal_kv.left_child_len();
debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
if internal_kv.can_merge() {
internal_kv.merge_tracking_child()
internal_kv.merge_tracking_child(alloc)
} else {
// `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
let count = (MIN_LEN + 1).saturating_sub(left_len);
@@ -153,12 +158,15 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// provisions an extra element to allow merging its children in turn
/// without becoming underfull.
/// Returns wherever the right child ended up.
fn fix_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
fn fix_right_child<A: Allocator>(
self,
alloc: &A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
let mut internal_kv = self.consider_for_balancing();
let right_len = internal_kv.right_child_len();
debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
if internal_kv.can_merge() {
internal_kv.merge_tracking_child()
internal_kv.merge_tracking_child(alloc)
} else {
// `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
let count = (MIN_LEN + 1).saturating_sub(right_len);
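
A quick sanity check of the steal-count arithmetic used in both `fix_left_child` and `fix_right_child`. With `B = 6` (see node.rs below), `MIN_LEN` is `B - 1 = 5`; this is an outside-the-library sketch, not the library's own code:

const B: usize = 6;
const MIN_LEN: usize = B - 1; // node::MIN_LEN_AFTER_SPLIT (assumed relation)

// `MIN_LEN + 1` provisions one extra element so that a merge on the next
// level down cannot leave this child underfull again.
fn steal_count(child_len: usize) -> usize {
    (MIN_LEN + 1).saturating_sub(child_len)
}

fn main() {
    assert_eq!(steal_count(3), 3); // an underfull child of length 3 steals 3
    assert_eq!(steal_count(6), 0); // already at MIN_LEN + 1: nothing to steal
}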

library/alloc/src/collections/btree/map.rs

@@ -9,6 +9,8 @@ use core::mem::{self, ManuallyDrop};
use core::ops::{Index, RangeBounds};
use core::ptr;
use crate::alloc::{Allocator, Global};
use super::borrow::DormantMutRef;
use super::dedup_sorted_iter::DedupSortedIter;
use super::navigate::{LazyLeafRange, LeafRange};
@@ -163,31 +165,41 @@ pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeMap")]
#[rustc_insignificant_dtor]
pub struct BTreeMap<K, V> {
pub struct BTreeMap<
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
root: Option<Root<K, V>>,
length: usize,
pub(super) alloc: ManuallyDrop<A>,
}
#[stable(feature = "btree_drop", since = "1.7.0")]
unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator> Drop for BTreeMap<K, V, A> {
fn drop(&mut self) {
drop(unsafe { ptr::read(self) }.into_iter())
}
}
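
The `Drop` impl above is a known idiom for types whose cleanup logic lives in a consuming method: `ptr::read(self)` makes a bitwise copy, `into_iter` consumes that copy (taking the allocator out of its `ManuallyDrop` field) and frees every node, and nothing else touches the original bits. A self-contained sketch of the same pattern on a toy type (`Holder` is invented for illustration):

use core::mem::ManuallyDrop;
use core::ptr;

struct Holder {
    payload: ManuallyDrop<String>,
}

impl Holder {
    // Consuming cleanup: move the field out without running `Holder::drop`
    // on this copy, mirroring `BTreeMap::into_iter` taking `self.alloc`.
    fn into_payload(self) -> String {
        let mut me = ManuallyDrop::new(self);
        unsafe { ManuallyDrop::take(&mut me.payload) }
    }
}

impl Drop for Holder {
    fn drop(&mut self) {
        // Bitwise-copy `self` and let the consuming method do the cleanup,
        // just like `BTreeMap::drop` delegating to `into_iter`.
        drop(unsafe { ptr::read(self) }.into_payload());
    }
}

fn main() {
    let h = Holder { payload: ManuallyDrop::new(String::from("freed once")) };
    drop(h); // the String is freed exactly once, by `into_payload`
}
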
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
fn clone(&self) -> BTreeMap<K, V> {
fn clone_subtree<'a, K: Clone, V: Clone>(
impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
fn clone(&self) -> BTreeMap<K, V, A> {
fn clone_subtree<'a, K: Clone, V: Clone, A: Clone + Allocator>(
node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
) -> BTreeMap<K, V>
alloc: &A,
) -> BTreeMap<K, V, A>
where
K: 'a,
V: 'a,
{
match node.force() {
Leaf(leaf) => {
let mut out_tree = BTreeMap { root: Some(Root::new()), length: 0 };
let mut out_tree = BTreeMap {
root: Some(Root::new(alloc)),
length: 0,
alloc: ManuallyDrop::new((*alloc).clone()),
};
{
let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
@@ -209,11 +221,11 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
out_tree
}
Internal(internal) => {
let mut out_tree = clone_subtree(internal.first_edge().descend());
let mut out_tree = clone_subtree(internal.first_edge().descend(), alloc);
{
let out_root = out_tree.root.as_mut().unwrap();
let mut out_node = out_root.push_internal_level();
let mut out_node = out_root.push_internal_level(alloc);
let mut in_edge = internal.first_edge();
while let Ok(kv) = in_edge.right_kv() {
let (k, v) = kv.into_kv();
@@ -221,7 +233,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
let k = (*k).clone();
let v = (*v).clone();
let subtree = clone_subtree(in_edge.descend());
let subtree = clone_subtree(in_edge.descend(), alloc);
// We can't destructure subtree directly
// because BTreeMap implements Drop
@@ -232,7 +244,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
(root, length)
};
out_node.push(k, v, subroot.unwrap_or_else(Root::new));
out_node.push(k, v, subroot.unwrap_or_else(|| Root::new(alloc)));
out_tree.length += 1 + sublength;
}
}
@@ -243,14 +255,14 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
}
if self.is_empty() {
BTreeMap::new()
BTreeMap::new_in(ManuallyDrop::into_inner(self.alloc.clone()))
} else {
clone_subtree(self.root.as_ref().unwrap().reborrow()) // unwrap succeeds because not empty
clone_subtree(self.root.as_ref().unwrap().reborrow(), &*self.alloc) // unwrap succeeds because not empty
}
}
}
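
`Clone` now carries an `A: Clone + Allocator` bound: the copy is built node by node from a clone of the source's allocator. A hedged usage sketch (nightly, since constructing with an explicit allocator is unstable):

#![feature(allocator_api, btreemap_alloc)]

use std::alloc::Global;
use std::collections::BTreeMap;

fn main() {
    let mut map = BTreeMap::new_in(Global);
    map.insert(1, "a");

    // The clone allocates its own nodes from `self.alloc.clone()`.
    let copy = map.clone();
    assert_eq!(copy.get(&1), Some(&"a"));
}
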
impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
impl<K, Q: ?Sized, A: Allocator> super::Recover<Q> for BTreeMap<K, (), A>
where
K: Borrow<Q> + Ord,
Q: Ord,
@@ -269,21 +281,29 @@ where
let (map, dormant_map) = DormantMutRef::new(self);
let root_node = map.root.as_mut()?.borrow_mut();
match root_node.search_tree(key) {
Found(handle) => {
Some(OccupiedEntry { handle, dormant_map, _marker: PhantomData }.remove_kv().0)
}
Found(handle) => Some(
OccupiedEntry { handle, dormant_map, alloc: &*map.alloc, _marker: PhantomData }
.remove_kv()
.0,
),
GoDown(_) => None,
}
}
fn replace(&mut self, key: K) -> Option<K> {
let (map, dormant_map) = DormantMutRef::new(self);
let root_node = map.root.get_or_insert_with(Root::new).borrow_mut();
let root_node = map.root.get_or_insert_with(|| Root::new(&*map.alloc)).borrow_mut();
match root_node.search_tree::<K>(&key) {
Found(mut kv) => Some(mem::replace(kv.key_mut(), key)),
GoDown(handle) => {
VacantEntry { key, handle: Some(handle), dormant_map, _marker: PhantomData }
.insert(());
VacantEntry {
key,
handle: Some(handle),
dormant_map,
alloc: &*map.alloc,
_marker: PhantomData,
}
.insert(());
None
}
}
@@ -343,12 +363,17 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IterMut<'_, K, V> {
/// [`IntoIterator`]: core::iter::IntoIterator
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<K, V> {
pub struct IntoIter<
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
range: LazyLeafRange<marker::Dying, K, V>,
length: usize,
alloc: A,
}
impl<K, V> IntoIter<K, V> {
impl<K, V, A: Allocator> IntoIter<K, V, A> {
/// Returns an iterator of references over the remaining items.
#[inline]
pub(super) fn iter(&self) -> Iter<'_, K, V> {
@@ -357,7 +382,7 @@ impl<K, V> IntoIter<K, V> {
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
impl<K: Debug, V: Debug, A: Allocator> Debug for IntoIter<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.iter()).finish()
}
@@ -371,7 +396,7 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
/// [`keys`]: BTreeMap::keys
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
pub struct Keys<'a, K, V> {
inner: Iter<'a, K, V>,
}
@@ -390,7 +415,7 @@ impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
/// [`values`]: BTreeMap::values
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
pub struct Values<'a, K, V> {
inner: Iter<'a, K, V>,
}
@@ -409,7 +434,7 @@ impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
/// [`values_mut`]: BTreeMap::values_mut
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "map_values_mut", since = "1.10.0")]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
pub struct ValuesMut<'a, K, V> {
inner: IterMut<'a, K, V>,
}
@@ -428,12 +453,12 @@ impl<K, V: fmt::Debug> fmt::Debug for ValuesMut<'_, K, V> {
/// [`into_keys`]: BTreeMap::into_keys
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub struct IntoKeys<K, V> {
inner: IntoIter<K, V>,
pub struct IntoKeys<K, V, A: Allocator = Global> {
inner: IntoIter<K, V, A>,
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K: fmt::Debug, V> fmt::Debug for IntoKeys<K, V> {
impl<K: fmt::Debug, V, A: Allocator> fmt::Debug for IntoKeys<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish()
}
@@ -447,12 +472,16 @@ impl<K: fmt::Debug, V> fmt::Debug for IntoKeys<K, V> {
/// [`into_values`]: BTreeMap::into_values
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub struct IntoValues<K, V> {
inner: IntoIter<K, V>,
pub struct IntoValues<
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
inner: IntoIter<K, V, A>,
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V: fmt::Debug> fmt::Debug for IntoValues<K, V> {
impl<K, V: fmt::Debug, A: Allocator> fmt::Debug for IntoValues<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
}
@@ -521,9 +550,11 @@ impl<K, V> BTreeMap<K, V> {
#[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
#[must_use]
pub const fn new() -> BTreeMap<K, V> {
BTreeMap { root: None, length: 0 }
BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(Global) }
}
}
impl<K, V, A: Allocator> BTreeMap<K, V, A> {
/// Clears the map, removing all elements.
///
/// # Examples
@@ -540,9 +571,37 @@ impl<K, V> BTreeMap<K, V> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
*self = BTreeMap::new();
let alloc = unsafe {
// drop all elements and retrieve allocator
ptr::read(self).into_iter().into_alloc()
};
*self = BTreeMap::new_in(alloc);
}
/// Makes a new empty BTreeMap with a reasonable choice for B.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// # #![feature(allocator_api)]
/// # #![feature(btreemap_alloc)]
/// use std::collections::BTreeMap;
/// use std::alloc::Global;
///
/// let mut map = BTreeMap::new_in(Global);
///
/// // entries can now be inserted into the empty map
/// map.insert(1, "a");
/// ```
#[unstable(feature = "btreemap_alloc", issue = "32838")]
pub fn new_in(alloc: A) -> BTreeMap<K, V, A> {
BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(alloc) }
}
}
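
To see what the new parameter enables, here is a hedged sketch with a custom allocator; `CountingAlloc` is invented for illustration and simply forwards to `Global` while counting allocations:

#![feature(allocator_api, btreemap_alloc)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::collections::BTreeMap;
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering};

static ALLOCS: AtomicUsize = AtomicUsize::new(0);

// Illustrative allocator: delegates to `Global`, counting each allocation.
#[derive(Clone)]
struct CountingAlloc;

unsafe impl Allocator for CountingAlloc {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        ALLOCS.fetch_add(1, Ordering::Relaxed);
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout)
    }
}

fn main() {
    let mut map = BTreeMap::new_in(CountingAlloc);
    map.insert(1, "a");
    // At least the root leaf node came from our allocator.
    assert!(ALLOCS.load(Ordering::Relaxed) >= 1);
}
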
impl<K, V, A: Allocator> BTreeMap<K, V, A> {
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but the ordering
@@ -648,14 +707,19 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(*map.get(&2).unwrap(), "b");
/// ```
#[unstable(feature = "map_first_last", issue = "62924")]
pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>>
pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V, A>>
where
K: Ord,
{
let (map, dormant_map) = DormantMutRef::new(self);
let root_node = map.root.as_mut()?.borrow_mut();
let kv = root_node.first_leaf_edge().right_kv().ok()?;
Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, _marker: PhantomData })
Some(OccupiedEntry {
handle: kv.forget_node_type(),
dormant_map,
alloc: &*map.alloc,
_marker: PhantomData,
})
}
/// Removes and returns the first element in the map.
@@ -731,14 +795,19 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(*map.get(&2).unwrap(), "last");
/// ```
#[unstable(feature = "map_first_last", issue = "62924")]
pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>>
pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V, A>>
where
K: Ord,
{
let (map, dormant_map) = DormantMutRef::new(self);
let root_node = map.root.as_mut()?.borrow_mut();
let kv = root_node.last_leaf_edge().left_kv().ok()?;
Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, _marker: PhantomData })
Some(OccupiedEntry {
handle: kv.forget_node_type(),
dormant_map,
alloc: &*map.alloc,
_marker: PhantomData,
})
}
/// Removes and returns the last element in the map.
@@ -891,7 +960,7 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(err.value, "b");
/// ```
#[unstable(feature = "map_try_insert", issue = "82766")]
pub fn try_insert(&mut self, key: K, value: V) -> Result<&mut V, OccupiedError<'_, K, V>>
pub fn try_insert(&mut self, key: K, value: V) -> Result<&mut V, OccupiedError<'_, K, V, A>>
where
K: Ord,
{
@@ -955,9 +1024,10 @@ impl<K, V> BTreeMap<K, V> {
let (map, dormant_map) = DormantMutRef::new(self);
let root_node = map.root.as_mut()?.borrow_mut();
match root_node.search_tree(key) {
Found(handle) => {
Some(OccupiedEntry { handle, dormant_map, _marker: PhantomData }.remove_entry())
}
Found(handle) => Some(
OccupiedEntry { handle, dormant_map, alloc: &*map.alloc, _marker: PhantomData }
.remove_entry(),
),
GoDown(_) => None,
}
}
@@ -1019,6 +1089,7 @@ impl<K, V> BTreeMap<K, V> {
pub fn append(&mut self, other: &mut Self)
where
K: Ord,
A: Clone,
{
// Do we have to append anything at all?
if other.is_empty() {
@@ -1031,10 +1102,14 @@ impl<K, V> BTreeMap<K, V> {
return;
}
let self_iter = mem::take(self).into_iter();
let other_iter = mem::take(other).into_iter();
let root = self.root.get_or_insert_with(Root::new);
root.append_from_sorted_iters(self_iter, other_iter, &mut self.length)
let self_iter =
mem::replace(self, Self::new_in(ManuallyDrop::into_inner(self.alloc.clone())))
.into_iter();
let other_iter =
mem::replace(other, Self::new_in(ManuallyDrop::into_inner(self.alloc.clone())))
.into_iter();
let root = self.root.get_or_insert_with(|| Root::new(&*self.alloc));
root.append_from_sorted_iters(self_iter, other_iter, &mut self.length, &*self.alloc)
}
/// Constructs a double-ended iterator over a sub-range of elements in the map.
@ -1141,21 +1216,31 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(count["a"], 3);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V>
pub fn entry(&mut self, key: K) -> Entry<'_, K, V, A>
where
K: Ord,
{
let (map, dormant_map) = DormantMutRef::new(self);
match map.root {
None => Vacant(VacantEntry { key, handle: None, dormant_map, _marker: PhantomData }),
None => Vacant(VacantEntry {
key,
handle: None,
dormant_map,
alloc: &*map.alloc,
_marker: PhantomData,
}),
Some(ref mut root) => match root.borrow_mut().search_tree(&key) {
Found(handle) => {
Occupied(OccupiedEntry { handle, dormant_map, _marker: PhantomData })
}
Found(handle) => Occupied(OccupiedEntry {
handle,
dormant_map,
alloc: &*map.alloc,
_marker: PhantomData,
}),
GoDown(handle) => Vacant(VacantEntry {
key,
handle: Some(handle),
dormant_map,
alloc: &*map.alloc,
_marker: PhantomData,
}),
},
@@ -1195,20 +1280,25 @@ impl<K, V> BTreeMap<K, V> {
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
where
K: Borrow<Q> + Ord,
A: Clone,
{
if self.is_empty() {
return Self::new();
return Self::new_in(ManuallyDrop::into_inner(self.alloc.clone()));
}
let total_num = self.len();
let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty
let right_root = left_root.split_off(key);
let right_root = left_root.split_off(key, &*self.alloc);
let (new_left_len, right_len) = Root::calc_split_length(total_num, &left_root, &right_root);
self.length = new_left_len;
BTreeMap { root: Some(right_root), length: right_len }
BTreeMap {
root: Some(right_root),
length: right_len,
alloc: ManuallyDrop::new((*self.alloc).clone()),
}
}
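
`split_off` gains an `A: Clone` bound because the returned map is an independent tree that needs its own allocator handle, hence the `ManuallyDrop::new((*self.alloc).clone())` in the struct literal above. Observable behavior is unchanged on the stable API:

use std::collections::BTreeMap;

fn main() {
    let mut map = BTreeMap::from([(1, "a"), (2, "b"), (3, "c"), (17, "q")]);

    // Keys >= 3 move into the returned map; both halves carry an allocator.
    let tail = map.split_off(&3);

    assert_eq!(map.keys().copied().collect::<Vec<_>>(), [1, 2]);
    assert_eq!(tail.keys().copied().collect::<Vec<_>>(), [3, 17]);
}
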
/// Creates an iterator that visits all elements (key-value pairs) in
@@ -1244,28 +1334,39 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), [1, 3, 5, 7]);
/// ```
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F>
pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F, &A>
where
K: Ord,
F: FnMut(&K, &mut V) -> bool,
{
DrainFilter { pred, inner: self.drain_filter_inner() }
let (inner, alloc) = self.drain_filter_inner();
DrainFilter { pred, inner, alloc }
}
pub(super) fn drain_filter_inner(&mut self) -> DrainFilterInner<'_, K, V>
pub(super) fn drain_filter_inner(&mut self) -> (DrainFilterInner<'_, K, V>, &A)
where
K: Ord,
{
if let Some(root) = self.root.as_mut() {
let (root, dormant_root) = DormantMutRef::new(root);
let front = root.borrow_mut().first_leaf_edge();
DrainFilterInner {
length: &mut self.length,
dormant_root: Some(dormant_root),
cur_leaf_edge: Some(front),
}
(
DrainFilterInner {
length: &mut self.length,
dormant_root: Some(dormant_root),
cur_leaf_edge: Some(front),
},
&*self.alloc,
)
} else {
DrainFilterInner { length: &mut self.length, dormant_root: None, cur_leaf_edge: None }
(
DrainFilterInner {
length: &mut self.length,
dormant_root: None,
cur_leaf_edge: None,
},
&*self.alloc,
)
}
}
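
`drain_filter_inner` now also hands back `&self.alloc` so the iterator can pass it to `remove_kv_tracking` on every removal. Usage on the unstable feature is unchanged; a sketch mirroring the doc example above:

#![feature(btree_drain_filter)]

use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();

    // The predicate sees `&K` and `&mut V`; matching pairs are removed.
    let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();

    assert_eq!(evens.keys().copied().collect::<Vec<_>>(), [0, 2, 4, 6]);
    assert_eq!(map.keys().copied().collect::<Vec<_>>(), [1, 3, 5, 7]);
}
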
@@ -1287,7 +1388,7 @@ impl<K, V> BTreeMap<K, V> {
/// ```
#[inline]
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub fn into_keys(self) -> IntoKeys<K, V> {
pub fn into_keys(self) -> IntoKeys<K, V, A> {
IntoKeys { inner: self.into_iter() }
}
@@ -1309,25 +1410,25 @@ impl<K, V> BTreeMap<K, V> {
/// ```
#[inline]
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub fn into_values(self) -> IntoValues<K, V> {
pub fn into_values(self) -> IntoValues<K, V, A> {
IntoValues { inner: self.into_iter() }
}
/// Makes a `BTreeMap` from a sorted iterator.
pub(crate) fn bulk_build_from_sorted_iter<I>(iter: I) -> Self
pub(crate) fn bulk_build_from_sorted_iter<I>(iter: I, alloc: A) -> Self
where
K: Ord,
I: IntoIterator<Item = (K, V)>,
{
let mut root = Root::new();
let mut root = Root::new(&alloc);
let mut length = 0;
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length);
BTreeMap { root: Some(root), length }
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, &alloc);
BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> IntoIterator for &'a BTreeMap<K, V> {
impl<'a, K, V, A: Allocator> IntoIterator for &'a BTreeMap<K, V, A> {
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
@@ -1396,7 +1497,7 @@ impl<K, V> Clone for Iter<'_, K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> IntoIterator for &'a mut BTreeMap<K, V> {
impl<'a, K, V, A: Allocator> IntoIterator for &'a mut BTreeMap<K, V, A> {
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
@@ -1406,7 +1507,7 @@ impl<'a, K, V> IntoIterator for &'a mut BTreeMap<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
@@ -1436,7 +1537,7 @@ impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
if self.length == 0 {
None
@@ -1466,28 +1567,41 @@ impl<'a, K, V> IterMut<'a, K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> IntoIterator for BTreeMap<K, V> {
impl<K, V, A: Allocator> IntoIterator for BTreeMap<K, V, A> {
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
type IntoIter = IntoIter<K, V, A>;
fn into_iter(self) -> IntoIter<K, V> {
fn into_iter(self) -> IntoIter<K, V, A> {
let mut me = ManuallyDrop::new(self);
if let Some(root) = me.root.take() {
let full_range = root.into_dying().full_range();
IntoIter { range: full_range, length: me.length }
IntoIter {
range: full_range,
length: me.length,
alloc: unsafe { ManuallyDrop::take(&mut me.alloc) },
}
} else {
IntoIter { range: LazyLeafRange::none(), length: 0 }
IntoIter {
range: LazyLeafRange::none(),
length: 0,
alloc: unsafe { ManuallyDrop::take(&mut me.alloc) },
}
}
}
}
#[stable(feature = "btree_drop", since = "1.7.0")]
impl<K, V> Drop for IntoIter<K, V> {
impl<K, V, A: Allocator> Drop for IntoIter<K, V, A> {
fn drop(&mut self) {
struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
self.dealloc()
}
}
impl<K, V, A: Allocator> IntoIter<K, V, A> {
fn dealloc(&mut self) {
struct DropGuard<'a, K, V, A: Allocator>(&'a mut IntoIter<K, V, A>);
impl<'a, K, V> Drop for DropGuard<'a, K, V> {
impl<'a, K, V, A: Allocator> Drop for DropGuard<'a, K, V, A> {
fn drop(&mut self) {
// Continue the same loop we perform below. This only runs when unwinding, so we
// don't have to care about panics this time (they'll abort).
@@ -1507,18 +1621,18 @@ impl<K, V> Drop for IntoIter<K, V> {
}
}
impl<K, V> IntoIter<K, V> {
impl<K, V, A: Allocator> IntoIter<K, V, A> {
/// Core of a `next` method returning a dying KV handle,
/// invalidated by further calls to this function and some others.
fn dying_next(
&mut self,
) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
if self.length == 0 {
self.range.deallocating_end();
self.range.deallocating_end(&self.alloc);
None
} else {
self.length -= 1;
Some(unsafe { self.range.deallocating_next_unchecked() })
Some(unsafe { self.range.deallocating_next_unchecked(&self.alloc) })
}
}
@@ -1528,17 +1642,22 @@ impl<K, V> IntoIter<K, V> {
&mut self,
) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
if self.length == 0 {
self.range.deallocating_end();
self.range.deallocating_end(&self.alloc);
None
} else {
self.length -= 1;
Some(unsafe { self.range.deallocating_next_back_unchecked() })
Some(unsafe { self.range.deallocating_next_back_unchecked(&self.alloc) })
}
}
fn into_alloc(mut self) -> A {
self.dealloc(); // Deallocate, then don't drop as drop will also call dealloc
let iter = ManuallyDrop::new(self);
unsafe { ptr::read(&iter.alloc) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Iterator for IntoIter<K, V> {
impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A> {
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> {
@@ -1552,7 +1671,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
impl<K, V, A: Allocator> DoubleEndedIterator for IntoIter<K, V, A> {
fn next_back(&mut self) -> Option<(K, V)> {
// SAFETY: we consume the dying handle immediately.
self.dying_next_back().map(unsafe { |kv| kv.into_key_val() })
@@ -1560,14 +1679,14 @@ impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A> {
fn len(&self) -> usize {
self.length
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IntoIter<K, V> {}
impl<K, V, A: Allocator> FusedIterator for IntoIter<K, V, A> {}
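
`IntoIter`, and the `IntoKeys`/`IntoValues` adapters built on it, now own the allocator by value, so every node they free is returned to the allocator the map was created with. On the default `Global` allocator this is invisible:

use std::collections::BTreeMap;

fn main() {
    let map = BTreeMap::from([(1, "a"), (2, "b")]);

    let keys: Vec<i32> = map.clone().into_keys().collect();
    let values: Vec<&str> = map.into_values().collect();

    assert_eq!(keys, [1, 2]);
    assert_eq!(values, ["a", "b"]);
}
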
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> Iterator for Keys<'a, K, V> {
@@ -1661,18 +1780,22 @@ impl<K, V> Clone for Values<'_, K, V> {
/// An iterator produced by calling `drain_filter` on BTreeMap.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub struct DrainFilter<'a, K, V, F>
where
K: 'a,
V: 'a,
pub struct DrainFilter<
'a,
K,
V,
F,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> where
F: 'a + FnMut(&K, &mut V) -> bool,
{
pred: F,
inner: DrainFilterInner<'a, K, V>,
alloc: A,
}
/// Most of the implementation of DrainFilter is generic over the type
/// of the predicate, thus also serving for BTreeSet::DrainFilter.
pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
pub(super) struct DrainFilterInner<'a, K, V> {
/// Reference to the length field in the borrowed map, updated live.
length: &'a mut usize,
/// Buried reference to the root field in the borrowed map.
@@ -1685,7 +1808,7 @@ pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F> Drop for DrainFilter<'_, K, V, F>
impl<K, V, F, A: Allocator> Drop for DrainFilter<'_, K, V, F, A>
where
F: FnMut(&K, &mut V) -> bool,
{
@@ -1707,14 +1830,14 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
impl<K, V, F, A: Allocator> Iterator for DrainFilter<'_, K, V, F, A>
where
F: FnMut(&K, &mut V) -> bool,
{
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> {
self.inner.next(&mut self.pred)
self.inner.next(&mut self.pred, &self.alloc)
}
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -1722,7 +1845,7 @@ where
}
}
impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
impl<'a, K, V> DrainFilterInner<'a, K, V> {
/// Allow Debug implementations to predict the next element.
pub(super) fn peek(&self) -> Option<(&K, &V)> {
let edge = self.cur_leaf_edge.as_ref()?;
@@ -1730,7 +1853,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
}
/// Implementation of a typical `DrainFilter::next` method, given the predicate.
pub(super) fn next<F>(&mut self, pred: &mut F) -> Option<(K, V)>
pub(super) fn next<F, A: Allocator>(&mut self, pred: &mut F, alloc: &A) -> Option<(K, V)>
where
F: FnMut(&K, &mut V) -> bool,
{
@@ -1738,13 +1861,16 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
let (k, v) = kv.kv_mut();
if pred(k, v) {
*self.length -= 1;
let (kv, pos) = kv.remove_kv_tracking(|| {
// SAFETY: we will touch the root in a way that will not
// invalidate the position returned.
let root = unsafe { self.dormant_root.take().unwrap().awaken() };
root.pop_internal_level();
self.dormant_root = Some(DormantMutRef::new(root).1);
});
let (kv, pos) = kv.remove_kv_tracking(
|| {
// SAFETY: we will touch the root in a way that will not
// invalidate the position returned.
let root = unsafe { self.dormant_root.take().unwrap().awaken() };
root.pop_internal_level(alloc);
self.dormant_root = Some(DormantMutRef::new(root).1);
},
alloc,
);
self.cur_leaf_edge = Some(pos);
return Some(kv);
}
@@ -1822,7 +1948,7 @@ impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> Iterator for IntoKeys<K, V> {
impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A> {
type Item = K;
fn next(&mut self) -> Option<K> {
@@ -1847,24 +1973,24 @@ impl<K, V> Iterator for IntoKeys<K, V> {
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> DoubleEndedIterator for IntoKeys<K, V> {
impl<K, V, A: Allocator> DoubleEndedIterator for IntoKeys<K, V, A> {
fn next_back(&mut self) -> Option<K> {
self.inner.next_back().map(|(k, _)| k)
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> ExactSizeIterator for IntoKeys<K, V> {
impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A> {
fn len(&self) -> usize {
self.inner.len()
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> FusedIterator for IntoKeys<K, V> {}
impl<K, V, A: Allocator> FusedIterator for IntoKeys<K, V, A> {}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> Iterator for IntoValues<K, V> {
impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A> {
type Item = V;
fn next(&mut self) -> Option<V> {
@@ -1881,21 +2007,21 @@ impl<K, V> Iterator for IntoValues<K, V> {
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> DoubleEndedIterator for IntoValues<K, V> {
impl<K, V, A: Allocator> DoubleEndedIterator for IntoValues<K, V, A> {
fn next_back(&mut self) -> Option<V> {
self.inner.next_back().map(|(_, v)| v)
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> ExactSizeIterator for IntoValues<K, V> {
impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A> {
fn len(&self) -> usize {
self.inner.len()
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V> FusedIterator for IntoValues<K, V> {}
impl<K, V, A: Allocator> FusedIterator for IntoValues<K, V, A> {}
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
@@ -1956,12 +2082,12 @@ impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
// use stable sort to preserve the insertion order.
inputs.sort_by(|a, b| a.0.cmp(&b.0));
BTreeMap::bulk_build_from_sorted_iter(inputs)
BTreeMap::bulk_build_from_sorted_iter(inputs, Global)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
impl<K: Ord, V, A: Allocator> Extend<(K, V)> for BTreeMap<K, V, A> {
#[inline]
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
iter.into_iter().for_each(move |(k, v)| {
@@ -1976,7 +2102,7 @@ impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
impl<'a, K: Ord + Copy, V: Copy, A: Allocator> Extend<(&'a K, &'a V)> for BTreeMap<K, V, A> {
fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
}
@@ -1988,7 +2114,7 @@ impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
impl<K: Hash, V: Hash, A: Allocator> Hash for BTreeMap<K, V, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_length_prefix(self.len());
for elt in self {
@@ -2006,40 +2132,40 @@ impl<K, V> Default for BTreeMap<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
fn eq(&self, other: &BTreeMap<K, V>) -> bool {
impl<K: PartialEq, V: PartialEq, A: Allocator> PartialEq for BTreeMap<K, V, A> {
fn eq(&self, other: &BTreeMap<K, V, A>) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
impl<K: Eq, V: Eq, A: Allocator> Eq for BTreeMap<K, V, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
impl<K: PartialOrd, V: PartialOrd, A: Allocator> PartialOrd for BTreeMap<K, V, A> {
#[inline]
fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
fn partial_cmp(&self, other: &BTreeMap<K, V, A>) -> Option<Ordering> {
self.iter().partial_cmp(other.iter())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
impl<K: Ord, V: Ord, A: Allocator> Ord for BTreeMap<K, V, A> {
#[inline]
fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
fn cmp(&self, other: &BTreeMap<K, V, A>) -> Ordering {
self.iter().cmp(other.iter())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
impl<K: Debug, V: Debug, A: Allocator> Debug for BTreeMap<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map().entries(self.iter()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
impl<K, Q: ?Sized, V, A: Allocator> Index<&Q> for BTreeMap<K, V, A>
where
K: Borrow<Q> + Ord,
Q: Ord,
@@ -2075,11 +2201,11 @@ impl<K: Ord, V, const N: usize> From<[(K, V); N]> for BTreeMap<K, V> {
// use stable sort to preserve the insertion order.
arr.sort_by(|a, b| a.0.cmp(&b.0));
BTreeMap::bulk_build_from_sorted_iter(arr)
BTreeMap::bulk_build_from_sorted_iter(arr, Global)
}
}
impl<K, V> BTreeMap<K, V> {
impl<K, V, A: Allocator> BTreeMap<K, V, A> {
/// Gets an iterator over the entries of the map, sorted by key.
///
/// # Examples

library/alloc/src/collections/btree/map/entry.rs

@@ -2,6 +2,8 @@ use core::fmt::{self, Debug};
use core::marker::PhantomData;
use core::mem;
use crate::alloc::{Allocator, Global};
use super::super::borrow::DormantMutRef;
use super::super::node::{marker, Handle, NodeRef};
use super::BTreeMap;
@@ -15,18 +17,23 @@ use Entry::*;
/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeEntry")]
pub enum Entry<'a, K: 'a, V: 'a> {
pub enum Entry<
'a,
K: 'a,
V: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
/// A vacant entry.
#[stable(feature = "rust1", since = "1.0.0")]
Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V, A>),
/// An occupied entry.
#[stable(feature = "rust1", since = "1.0.0")]
Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V, A>),
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for Entry<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
@@ -38,18 +45,25 @@ impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
/// A view into a vacant entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
pub struct VacantEntry<
'a,
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
pub(super) key: K,
/// `None` for an (empty) map without root
pub(super) handle: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V, A>>,
pub(super) alloc: &'a A,
// Be invariant in `K` and `V`
pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
impl<K: Debug + Ord, V, A: Allocator> Debug for VacantEntry<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
@@ -58,16 +72,23 @@ impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
/// A view into an occupied entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
pub struct OccupiedEntry<
'a,
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V, A>>,
pub(super) alloc: &'a A,
// Be invariant in `K` and `V`
pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for OccupiedEntry<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
}
@@ -77,15 +98,15 @@ impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
///
/// Contains the occupied entry, and the value that was not inserted.
#[unstable(feature = "map_try_insert", issue = "82766")]
pub struct OccupiedError<'a, K: 'a, V: 'a> {
pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator = Global> {
/// The entry in the map that was already occupied.
pub entry: OccupiedEntry<'a, K, V>,
pub entry: OccupiedEntry<'a, K, V, A>,
/// The value which was not inserted, because the entry was already occupied.
pub value: V,
}
#[unstable(feature = "map_try_insert", issue = "82766")]
impl<K: Debug + Ord, V: Debug> Debug for OccupiedError<'_, K, V> {
impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for OccupiedError<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedError")
.field("key", self.entry.key())
@@ -96,7 +117,7 @@ impl<K: Debug + Ord, V: Debug> Debug for OccupiedError<'_, K, V> {
}
#[unstable(feature = "map_try_insert", issue = "82766")]
impl<'a, K: Debug + Ord, V: Debug> fmt::Display for OccupiedError<'a, K, V> {
impl<'a, K: Debug + Ord, V: Debug, A: Allocator> fmt::Display for OccupiedError<'a, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
@@ -108,7 +129,7 @@ impl<'a, K: Debug + Ord, V: Debug> fmt::Display for OccupiedError<'a, K, V> {
}
}
impl<'a, K: Ord, V> Entry<'a, K, V> {
impl<'a, K: Ord, V, A: Allocator> Entry<'a, K, V, A> {
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// a mutable reference to the value in the entry.
///
@@ -236,7 +257,7 @@ impl<'a, K: Ord, V> Entry<'a, K, V> {
}
}
impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
impl<'a, K: Ord, V: Default, A: Allocator> Entry<'a, K, V, A> {
#[stable(feature = "entry_or_default", since = "1.28.0")]
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
@@ -259,7 +280,7 @@ impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
}
}
impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> {
/// Gets a reference to the key that would be used when inserting a value
/// through the VacantEntry.
///
@@ -317,13 +338,13 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
None => {
// SAFETY: There is no tree yet so no reference to it exists.
let map = unsafe { self.dormant_map.awaken() };
let mut root = NodeRef::new_leaf();
let mut root = NodeRef::new_leaf(self.alloc);
let val_ptr = root.borrow_mut().push(self.key, value) as *mut V;
map.root = Some(root.forget_type());
map.length = 1;
val_ptr
}
Some(handle) => match handle.insert_recursing(self.key, value) {
Some(handle) => match handle.insert_recursing(self.key, value, self.alloc) {
(None, val_ptr) => {
// SAFETY: We have consumed self.handle.
let map = unsafe { self.dormant_map.awaken() };
@@ -336,7 +357,7 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
// remaining reference to the tree, ins.left.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap(); // same as ins.left
root.push_internal_level().push(ins.kv.0, ins.kv.1, ins.right);
root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right);
map.length += 1;
val_ptr
}
@@ -348,7 +369,7 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
}
}
impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
impl<'a, K: Ord, V, A: Allocator> OccupiedEntry<'a, K, V, A> {
/// Gets a reference to the key in the entry.
///
/// # Examples
@@ -516,13 +537,14 @@ impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
// Body of `remove_entry`, probably separate because the name reflects the returned pair.
pub(super) fn remove_kv(self) -> (K, V) {
let mut emptied_internal_root = false;
let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true);
let (old_kv, _) =
self.handle.remove_kv_tracking(|| emptied_internal_root = true, self.alloc);
// SAFETY: we consumed the intermediate root borrow, `self.handle`.
let map = unsafe { self.dormant_map.awaken() };
map.length -= 1;
if emptied_internal_root {
let root = map.root.as_mut().unwrap();
root.pop_internal_level();
root.pop_internal_level(&*self.alloc);
}
old_kv
}
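
From the outside, the entry API is untouched by the extra `alloc` field; only the internal plumbing changed. A brief stable-API sketch that exercises both the vacant `insert` and occupied `remove_kv` paths above:

use std::collections::btree_map::{BTreeMap, Entry};

fn main() {
    let mut counts: BTreeMap<&str, usize> = BTreeMap::new();

    // Vacant entries insert through `VacantEntry::insert` (allocating nodes
    // from the map's allocator); occupied ones just update in place.
    for word in ["a", "b", "a"] {
        *counts.entry(word).or_insert(0) += 1;
    }
    assert_eq!(counts["a"], 2);

    // Removing through an occupied entry goes through `remove_kv`.
    if let Entry::Occupied(e) = counts.entry("b") {
        assert_eq!(e.remove_entry(), ("b", 1));
    }
}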

library/alloc/src/collections/btree/map/tests.rs

@@ -116,7 +116,11 @@ impl<K, V> BTreeMap<K, V> {
{
let iter = mem::take(self).into_iter();
if !iter.is_empty() {
self.root.insert(Root::new()).bulk_push(iter, &mut self.length);
self.root.insert(Root::new(&*self.alloc)).bulk_push(
iter,
&mut self.length,
&*self.alloc,
);
}
}
}

library/alloc/src/collections/btree/navigate.rs

@@ -5,6 +5,7 @@ use core::ptr;
use super::node::{marker, ForceResult::*, Handle, NodeRef};
use crate::alloc::Allocator;
// `front` and `back` are always both `None` or both `Some`.
pub struct LeafRange<BorrowType, K, V> {
front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
@@ -177,27 +178,29 @@ impl<K, V> LazyLeafRange<marker::Dying, K, V> {
}
#[inline]
pub unsafe fn deallocating_next_unchecked(
pub unsafe fn deallocating_next_unchecked<A: Allocator>(
&mut self,
alloc: &A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
debug_assert!(self.front.is_some());
let front = self.init_front().unwrap();
unsafe { front.deallocating_next_unchecked() }
unsafe { front.deallocating_next_unchecked(alloc) }
}
#[inline]
pub unsafe fn deallocating_next_back_unchecked(
pub unsafe fn deallocating_next_back_unchecked<A: Allocator>(
&mut self,
alloc: &A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
debug_assert!(self.back.is_some());
let back = self.init_back().unwrap();
unsafe { back.deallocating_next_back_unchecked() }
unsafe { back.deallocating_next_back_unchecked(alloc) }
}
#[inline]
pub fn deallocating_end(&mut self) {
pub fn deallocating_end<A: Allocator>(&mut self, alloc: &A) {
if let Some(front) = self.take_front() {
front.deallocating_end()
front.deallocating_end(alloc)
}
}
}
@@ -441,18 +444,21 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// `deallocating_next_back`.
/// - The returned KV handle is only valid to access the key and value,
/// and only valid until the next call to a `deallocating_` method.
unsafe fn deallocating_next(
unsafe fn deallocating_next<A: Allocator>(
self,
alloc: &A,
) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
{
let mut edge = self.forget_node_type();
loop {
edge = match edge.right_kv() {
Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)),
Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
Some(parent_edge) => parent_edge.forget_node_type(),
None => return None,
},
Err(last_edge) => {
match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } {
Some(parent_edge) => parent_edge.forget_node_type(),
None => return None,
}
}
}
}
}
@@ -470,18 +476,21 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// `deallocating_next`.
/// - The returned KV handle is only valid to access the key and value,
/// and only valid until the next call to a `deallocating_` method.
unsafe fn deallocating_next_back(
unsafe fn deallocating_next_back<A: Allocator>(
self,
alloc: &A,
) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
{
let mut edge = self.forget_node_type();
loop {
edge = match edge.left_kv() {
Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)),
Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
Some(parent_edge) => parent_edge.forget_node_type(),
None => return None,
},
Err(last_edge) => {
match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } {
Some(parent_edge) => parent_edge.forget_node_type(),
None => return None,
}
}
}
}
}
@@ -492,9 +501,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// both sides of the tree, and have hit the same edge. As it is intended
/// only to be called when all keys and values have been returned,
/// no cleanup is done on any of the keys or values.
fn deallocating_end(self) {
fn deallocating_end<A: Allocator>(self, alloc: &A) {
let mut edge = self.forget_node_type();
while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend(alloc) } {
edge = parent_edge.forget_node_type();
}
}
@@ -569,10 +578,13 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
///
/// The only safe way to proceed with the updated handle is to compare it, drop it,
/// or call this method or counterpart `deallocating_next_back_unchecked` again.
unsafe fn deallocating_next_unchecked(
unsafe fn deallocating_next_unchecked<A: Allocator>(
&mut self,
alloc: &A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next().unwrap() })
super::mem::replace(self, |leaf_edge| unsafe {
leaf_edge.deallocating_next(alloc).unwrap()
})
}
/// Moves the leaf edge handle to the previous leaf edge and returns the key and value
@@ -587,11 +599,12 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
///
/// The only safe way to proceed with the updated handle is to compare it, drop it,
/// or call this method or counterpart `deallocating_next_unchecked` again.
unsafe fn deallocating_next_back_unchecked(
unsafe fn deallocating_next_back_unchecked<A: Allocator>(
&mut self,
alloc: &A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
super::mem::replace(self, |leaf_edge| unsafe {
leaf_edge.deallocating_next_back().unwrap()
leaf_edge.deallocating_next_back(alloc).unwrap()
})
}
}

library/alloc/src/collections/btree/node.rs

@@ -36,7 +36,7 @@ use core::mem::{self, MaybeUninit};
use core::ptr::{self, NonNull};
use core::slice::SliceIndex;
use crate::alloc::{Allocator, Global, Layout};
use crate::alloc::{Allocator, Layout};
use crate::boxed::Box;
const B: usize = 6;
@@ -78,9 +78,9 @@ impl<K, V> LeafNode<K, V> {
}
/// Creates a new boxed `LeafNode`.
fn new() -> Box<Self> {
fn new<A: Allocator>(alloc: &A) -> Box<Self, &A> {
unsafe {
let mut leaf = Box::new_uninit();
let mut leaf = Box::new_uninit_in(alloc);
LeafNode::init(leaf.as_mut_ptr());
leaf.assume_init()
}
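
`Box::new_uninit_in` allocates uninitialized storage from the given allocator; the node is then initialized in place before `assume_init` vouches for it. A self-contained sketch of the idiom on an invented `Node` type (nightly `allocator_api` + `new_uninit`):

#![feature(allocator_api, new_uninit)]

use std::alloc::Global;

struct Node {
    len: u16,
    keys: [u32; 4],
}

fn main() {
    // Allocate uninitialized memory in a chosen allocator, initialize it in
    // place, then assert that it is initialized.
    let mut boxed = Box::new_uninit_in(Global);
    boxed.write(Node { len: 0, keys: [0; 4] });
    let node = unsafe { boxed.assume_init() };
    assert_eq!(node.len, 0);
}
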
@@ -110,9 +110,9 @@ impl<K, V> InternalNode<K, V> {
/// An invariant of internal nodes is that they have at least one
/// initialized and valid edge. This function does not set up
/// such an edge.
unsafe fn new() -> Box<Self> {
unsafe fn new<A: Allocator>(alloc: &A) -> Box<Self, &A> {
unsafe {
let mut node = Box::<Self>::new_uninit();
let mut node = Box::<Self, _>::new_uninit_in(alloc);
// We only need to initialize the data; the edges are MaybeUninit.
LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
node.assume_init()
@@ -213,25 +213,28 @@ unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type>
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
pub fn new_leaf() -> Self {
Self::from_new_leaf(LeafNode::new())
pub fn new_leaf<A: Allocator>(alloc: &A) -> Self {
Self::from_new_leaf(LeafNode::new(alloc))
}
fn from_new_leaf(leaf: Box<LeafNode<K, V>>) -> Self {
fn from_new_leaf<A: Allocator>(leaf: Box<LeafNode<K, V>, A>) -> Self {
NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
}
}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
fn new_internal(child: Root<K, V>) -> Self {
let mut new_node = unsafe { InternalNode::new() };
fn new_internal<A: Allocator>(child: Root<K, V>, alloc: &A) -> Self {
let mut new_node = unsafe { InternalNode::new(alloc) };
new_node.edges[0].write(child.node);
unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
}
/// # Safety
/// `height` must not be zero.
unsafe fn from_new_internal(internal: Box<InternalNode<K, V>>, height: usize) -> Self {
unsafe fn from_new_internal<A: Allocator>(
internal: Box<InternalNode<K, V>, A>,
height: usize,
) -> Self {
debug_assert!(height > 0);
let node = NonNull::from(Box::leak(internal)).cast();
let mut this = NodeRef { height, node, _marker: PhantomData };
@@ -387,14 +390,15 @@ impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
/// Similar to `ascend`, gets a reference to a node's parent node, but also
/// deallocates the current node in the process. This is unsafe because the
/// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend(
pub unsafe fn deallocate_and_ascend<A: Allocator>(
self,
alloc: &A,
) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
let height = self.height;
let node = self.node;
let ret = self.ascend().ok();
unsafe {
Global.deallocate(
alloc.deallocate(
node.cast(),
if height > 0 {
Layout::new::<InternalNode<K, V>>()
@@ -555,15 +559,18 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
/// Returns a new owned tree, with its own root node that is initially empty.
pub fn new() -> Self {
NodeRef::new_leaf().forget_type()
pub fn new<A: Allocator>(alloc: &A) -> Self {
NodeRef::new_leaf(alloc).forget_type()
}
/// Adds a new internal node with a single edge pointing to the previous root node,
/// makes that new node the root node, and returns it. This increases the height by 1
/// and is the opposite of `pop_internal_level`.
pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type());
pub fn push_internal_level<A: Allocator>(
&mut self,
alloc: &A,
) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());
// `self.borrow_mut()`, except that we just forgot we're internal now:
NodeRef { height: self.height, node: self.node, _marker: PhantomData }
@@ -578,7 +585,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
/// it will not invalidate other handles or references to the root node.
///
/// Panics if there is no internal level, i.e., if the root node is a leaf.
pub fn pop_internal_level(&mut self) {
pub fn pop_internal_level<A: Allocator>(&mut self, alloc: &A) {
assert!(self.height > 0);
let top = self.node;
@@ -593,7 +600,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
self.clear_parent_link();
unsafe {
Global.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
}
}
}
@@ -862,14 +869,19 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// this edge. This method splits the node if there isn't enough room.
///
/// The returned pointer points to the inserted value.
fn insert(mut self, key: K, val: V) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
fn insert<A: Allocator>(
mut self,
key: K,
val: V,
alloc: &A,
) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
if self.node.len() < CAPACITY {
let val_ptr = self.insert_fit(key, val);
(None, val_ptr)
} else {
let (middle_kv_idx, insertion) = splitpoint(self.idx);
let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
let mut result = middle.split();
let mut result = middle.split(alloc);
let mut insertion_edge = match insertion {
LeftOrRight::Left(insert_idx) => unsafe {
Handle::new_edge(result.left.reborrow_mut(), insert_idx)
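
For intuition, here is a simplified version of the split performed above, on a flat `Vec<i32>` instead of a real node; the actual `splitpoint` also biases the split toward the insertion side so both halves stay at least half full:

    const CAPACITY: usize = 11; // 2 * B - 1 with B = 6, as in the real nodes

    // Simplified leaf split: promote a middle element to the parent, move the
    // right half into a new node, then insert into whichever half `idx` fell in.
    fn split_and_insert(node: &mut Vec<i32>, idx: usize, key: i32) -> (i32, Vec<i32>) {
        assert_eq!(node.len(), CAPACITY);
        let middle = CAPACITY / 2;
        let mut right = node.split_off(middle + 1);
        let middle_kv = node.pop().unwrap(); // promoted to the parent
        if idx <= middle {
            node.insert(idx, key);
        } else {
            right.insert(idx - middle - 1, key);
        }
        (middle_kv, right)
    }

    fn main() {
        let mut left: Vec<i32> = (0..11).collect();
        let (promoted, right) = split_and_insert(&mut left, 11, 99);
        assert_eq!(promoted, 5);
        assert_eq!(right.last(), Some(&99));
    }
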
@@ -918,11 +930,12 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// Inserts a new key-value pair and an edge that will go to the right of that new pair
/// between this edge and the key-value pair to the right of this edge. This method splits
/// the node if there isn't enough room.
fn insert(
fn insert<A: Allocator>(
mut self,
key: K,
val: V,
edge: Root<K, V>,
alloc: &A,
) -> Option<SplitResult<'a, K, V, marker::Internal>> {
assert!(edge.height == self.node.height - 1);
@@ -932,7 +945,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
} else {
let (middle_kv_idx, insertion) = splitpoint(self.idx);
let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
let mut result = middle.split();
let mut result = middle.split(alloc);
let mut insertion_edge = match insertion {
LeftOrRight::Left(insert_idx) => unsafe {
Handle::new_edge(result.left.reborrow_mut(), insert_idx)
@@ -955,19 +968,20 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// If the returned result is some `SplitResult`, the `left` field will be the root node.
/// The returned pointer points to the inserted value, which in the case of `SplitResult`
/// is in the `left` or `right` tree.
pub fn insert_recursing(
pub fn insert_recursing<A: Allocator>(
self,
key: K,
value: V,
alloc: &A,
) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
let (mut split, val_ptr) = match self.insert(key, value) {
let (mut split, val_ptr) = match self.insert(key, value, alloc) {
(None, val_ptr) => return (None, val_ptr),
(Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
};
loop {
split = match split.left.ascend() {
Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) {
Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right, alloc) {
None => return (None, val_ptr),
Some(split) => split.forget_node_type(),
},
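
A rough model of the loop above: each level either absorbs the promoted key or splits in turn, and when the split reaches the root a new level appears. All names here are invented, and the real code also physically splits the full node into two:

    const CAPACITY: usize = 11;

    fn bubble_up(levels: &mut Vec<Vec<i32>>, mut promoted: i32) {
        for level in levels.iter_mut() {
            if level.len() < CAPACITY {
                level.push(promoted); // the parent had room: done
                return;
            }
            // Parent full: conceptually it splits as well and promotes
            // its own middle key (node division elided in this model).
            level.push(promoted);
            level.sort();
            promoted = level.remove(level.len() / 2);
        }
        levels.push(vec![promoted]); // the split reached the root: grow the tree
    }

    fn main() {
        let mut levels = vec![(0..11).collect::<Vec<i32>>()];
        bubble_up(&mut levels, 42);
        assert_eq!(levels.len(), 2); // a new root level was created
    }
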
@@ -1112,8 +1126,8 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// - The key and value pointed to by this handle are extracted.
/// - All the key-value pairs to the right of this handle are put into a newly
/// allocated node.
pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> {
let mut new_node = LeafNode::new();
pub fn split<A: Allocator>(mut self, alloc: &A) -> SplitResult<'a, K, V, marker::Leaf> {
let mut new_node = LeafNode::new(alloc);
let kv = self.split_leaf_data(&mut new_node);
@@ -1144,10 +1158,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// - The key and value pointed to by this handle are extracted.
/// - All the edges and key-value pairs to the right of this handle are put into
/// a newly allocated node.
pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> {
pub fn split<A: Allocator>(mut self, alloc: &A) -> SplitResult<'a, K, V, marker::Internal> {
let old_len = self.node.len();
unsafe {
let mut new_node = InternalNode::new();
let mut new_node = InternalNode::new(alloc);
let kv = self.split_leaf_data(&mut new_node.data);
let new_len = usize::from(new_node.data.len);
move_to_slice(
@@ -1252,9 +1266,11 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
) -> R,
R,
A: Allocator,
>(
self,
result: F,
alloc: &A,
) -> R {
let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
let old_parent_len = parent_node.len();
@@ -1299,9 +1315,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
} else {
Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
}
}
result(parent_node, left_node)
@@ -1311,16 +1327,22 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
/// the left child node and returns the shrunk parent node.
///
/// Panics unless we `.can_merge()`.
pub fn merge_tracking_parent(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
self.do_merge(|parent, _child| parent)
pub fn merge_tracking_parent<A: Allocator>(
self,
alloc: &A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
self.do_merge(|parent, _child| parent, alloc)
}
/// Merges the parent's key-value pair and both adjacent child nodes into
/// the left child node and returns that child node.
///
/// Panics unless we `.can_merge()`.
pub fn merge_tracking_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
self.do_merge(|_parent, child| child)
pub fn merge_tracking_child<A: Allocator>(
self,
alloc: &A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
self.do_merge(|_parent, child| child, alloc)
}
/// Merges the parent's key-value pair and both adjacent child nodes into
@@ -1328,9 +1350,10 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
/// where the tracked child edge ended up.
///
/// Panics unless we `.can_merge()`.
pub fn merge_tracking_child_edge(
pub fn merge_tracking_child_edge<A: Allocator>(
self,
track_edge_idx: LeftOrRight<usize>,
alloc: &A,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
let old_left_len = self.left_child.len();
let right_len = self.right_child.len();
@@ -1338,7 +1361,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
LeftOrRight::Left(idx) => idx <= old_left_len,
LeftOrRight::Right(idx) => idx <= right_len,
});
let child = self.merge_tracking_child();
let child = self.merge_tracking_child(alloc);
let new_idx = match track_edge_idx {
LeftOrRight::Left(idx) => idx,
LeftOrRight::Right(idx) => old_left_len + 1 + idx,
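
The `new_idx` arithmetic above can be checked with a tiny worked instance; the `Side` enum and the length 3 are assumptions for illustration:

    enum Side { Left(usize), Right(usize) } // local stand-in for LeftOrRight

    // After the merge, the parent's KV sits between the two halves, so edges
    // from the right child land after the left child's old edges plus one.
    fn new_idx(old_left_len: usize, track: Side) -> usize {
        match track {
            Side::Left(idx) => idx,
            Side::Right(idx) => old_left_len + 1 + idx,
        }
    }

    fn main() {
        assert_eq!(new_idx(3, Side::Left(2)), 2);
        assert_eq!(new_idx(3, Side::Right(2)), 6); // 3 + 1 + 2
    }
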

View file

@@ -1,5 +1,6 @@
use super::super::navigate;
use super::*;
use crate::alloc::Global;
use crate::fmt::Debug;
use crate::string::String;
@@ -67,10 +68,10 @@ fn test_splitpoint() {
#[test]
fn test_partial_eq() {
let mut root1 = NodeRef::new_leaf();
let mut root1 = NodeRef::new_leaf(&Global);
root1.borrow_mut().push(1, ());
let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type();
let root2 = Root::new();
let mut root1 = NodeRef::new_internal(root1.forget_type(), &Global).forget_type();
let root2 = Root::new(&Global);
root1.reborrow().assert_back_pointers();
root2.reborrow().assert_back_pointers();
@@ -86,9 +87,9 @@ fn test_partial_eq() {
assert!(top_edge_1 == top_edge_1);
assert!(top_edge_1 != top_edge_2);
root1.pop_internal_level();
unsafe { root1.into_dying().deallocate_and_ascend() };
unsafe { root2.into_dying().deallocate_and_ascend() };
root1.pop_internal_level(&Global);
unsafe { root1.into_dying().deallocate_and_ascend(&Global) };
unsafe { root2.into_dying().deallocate_and_ascend(&Global) };
}
#[test]

View file

@@ -1,26 +1,29 @@
use super::map::MIN_LEN;
use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef};
use core::alloc::Allocator;
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
/// Removes a key-value pair from the tree, and returns that pair, as well as
/// the leaf edge corresponding to that former pair. This may leave an
/// internal root node empty, which the caller should pop from the map
/// holding the tree. The caller should also decrement the map's length.
pub fn remove_kv_tracking<F: FnOnce()>(
pub fn remove_kv_tracking<F: FnOnce(), A: Allocator>(
self,
handle_emptied_internal_root: F,
alloc: &A,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
match self.force() {
Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root),
Internal(node) => node.remove_internal_kv(handle_emptied_internal_root),
Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root, alloc),
Internal(node) => node.remove_internal_kv(handle_emptied_internal_root, alloc),
}
}
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
fn remove_leaf_kv<F: FnOnce()>(
fn remove_leaf_kv<F: FnOnce(), A: Allocator>(
self,
handle_emptied_internal_root: F,
alloc: &A,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
let (old_kv, mut pos) = self.remove();
let len = pos.reborrow().into_node().len();
@@ -32,7 +35,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
Ok(Left(left_parent_kv)) => {
debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1);
if left_parent_kv.can_merge() {
left_parent_kv.merge_tracking_child_edge(Right(idx))
left_parent_kv.merge_tracking_child_edge(Right(idx), alloc)
} else {
debug_assert!(left_parent_kv.left_child_len() > MIN_LEN);
left_parent_kv.steal_left(idx)
@@ -41,7 +44,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
Ok(Right(right_parent_kv)) => {
debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1);
if right_parent_kv.can_merge() {
right_parent_kv.merge_tracking_child_edge(Left(idx))
right_parent_kv.merge_tracking_child_edge(Left(idx), alloc)
} else {
debug_assert!(right_parent_kv.right_child_len() > MIN_LEN);
right_parent_kv.steal_right(idx)
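
The merge-or-steal choice made in the arms above reduces to a length check; a hedged sketch using the tree's constants (`MIN_LEN = B - 1 = 5`, `CAPACITY = 2 * B - 1 = 11`):

    const MIN_LEN: usize = 5;
    const CAPACITY: usize = 11;

    // Which repair the remove path picks for an underfull node, given the
    // chosen sibling's length. Mirrors can_merge: the merged contents, plus
    // the parent KV that comes down, must fit in one node.
    fn repair(node_len: usize, sibling_len: usize) -> &'static str {
        if node_len >= MIN_LEN {
            "nothing to do"
        } else if node_len + 1 + sibling_len <= CAPACITY {
            "merge"
        } else {
            "steal from the sibling up to MIN_LEN"
        }
    }

    fn main() {
        assert_eq!(repair(4, 5), "merge");
        assert_eq!(repair(4, 10), "steal from the sibling up to MIN_LEN");
    }
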
@@ -60,7 +63,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
// rearrange the parent through the grandparent, thus change the
// link to the parent inside the leaf.
if let Ok(parent) = unsafe { pos.reborrow_mut() }.into_node().ascend() {
if !parent.into_node().forget_type().fix_node_and_affected_ancestors() {
if !parent.into_node().forget_type().fix_node_and_affected_ancestors(alloc) {
handle_emptied_internal_root();
}
}
@@ -70,16 +73,17 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
fn remove_internal_kv<F: FnOnce()>(
fn remove_internal_kv<F: FnOnce(), A: Allocator>(
self,
handle_emptied_internal_root: F,
alloc: &A,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
// Remove an adjacent KV from its leaf and then put it back in place of
// the element we were asked to remove. Prefer the left adjacent KV,
// for the reasons listed in `choose_parent_kv`.
let left_leaf_kv = self.left_edge().descend().last_leaf_edge().left_kv();
let left_leaf_kv = unsafe { left_leaf_kv.ok().unwrap_unchecked() };
let (left_kv, left_hole) = left_leaf_kv.remove_leaf_kv(handle_emptied_internal_root);
let (left_kv, left_hole) = left_leaf_kv.remove_leaf_kv(handle_emptied_internal_root, alloc);
// The internal node may have been stolen from or merged. Go back right
// to find where the original KV ended up.

View file

@@ -3,16 +3,20 @@
use crate::vec::Vec;
use core::borrow::Borrow;
use core::cmp::Ordering::{Equal, Greater, Less};
use core::cmp::Ordering::{self, Equal, Greater, Less};
use core::cmp::{max, min};
use core::fmt::{self, Debug};
use core::hash::{Hash, Hasher};
use core::iter::{FromIterator, FusedIterator, Peekable};
use core::mem::ManuallyDrop;
use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
use super::map::{BTreeMap, Keys};
use super::merge_iter::MergeIterInner;
use super::Recover;
use crate::alloc::{Allocator, Global};
// FIXME(conventions): implement bounded iterators
/// An ordered set based on a B-Tree.
@@ -71,15 +75,48 @@ use super::Recover;
///
/// let set = BTreeSet::from([1, 2, 3]);
/// ```
#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")]
pub struct BTreeSet<T> {
map: BTreeMap<T, ()>,
pub struct BTreeSet<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
map: BTreeMap<T, (), A>,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for BTreeSet<T> {
impl<T: Hash, A: Allocator> Hash for BTreeSet<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.map.hash(state)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialEq, A: Allocator> PartialEq for BTreeSet<T, A> {
fn eq(&self, other: &BTreeSet<T, A>) -> bool {
self.map.eq(&other.map)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq, A: Allocator> Eq for BTreeSet<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd, A: Allocator> PartialOrd for BTreeSet<T, A> {
fn partial_cmp(&self, other: &BTreeSet<T, A>) -> Option<Ordering> {
self.map.partial_cmp(&other.map)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator> Ord for BTreeSet<T, A> {
fn cmp(&self, other: &BTreeSet<T, A>) -> Ordering {
self.map.cmp(&other.map)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for BTreeSet<T, A> {
fn clone(&self) -> Self {
BTreeSet { map: self.map.clone() }
}
@@ -117,8 +154,11 @@ impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
/// [`IntoIterator`]: core::iter::IntoIterator
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IntoIter<T> {
iter: super::map::IntoIter<T, ()>,
pub struct IntoIter<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
iter: super::map::IntoIter<T, (), A>,
}
/// An iterator over a sub-range of items in a `BTreeSet`.
@@ -143,11 +183,14 @@ pub struct Range<'a, T: 'a> {
#[must_use = "this returns the difference as an iterator, \
without modifying either input set"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a> {
inner: DifferenceInner<'a, T>,
pub struct Difference<
'a,
T: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
inner: DifferenceInner<'a, T, A>,
}
#[derive(Debug)]
enum DifferenceInner<'a, T: 'a> {
enum DifferenceInner<'a, T: 'a, A: Allocator> {
Stitch {
// iterate all of `self` and some of `other`, spotting matches along the way
self_iter: Iter<'a, T>,
@@ -156,13 +199,32 @@ enum DifferenceInner<'a, T: 'a> {
Search {
// iterate `self`, look up in `other`
self_iter: Iter<'a, T>,
other_set: &'a BTreeSet<T>,
other_set: &'a BTreeSet<T, A>,
},
Iterate(Iter<'a, T>), // simply produce all elements in `self`
}
// Explicit Debug impl necessary because of issue #26925
impl<T: Debug, A: Allocator> Debug for DifferenceInner<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DifferenceInner::Stitch { self_iter, other_iter } => f
.debug_struct("Stitch")
.field("self_iter", self_iter)
.field("other_iter", other_iter)
.finish(),
DifferenceInner::Search { self_iter, other_set } => f
.debug_struct("Search")
.field("self_iter", self_iter)
.field("other_iter", other_set)
.finish(),
DifferenceInner::Iterate(x) => f.debug_tuple("Iterate").field(x).finish(),
}
}
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Difference<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Difference").field(&self.inner).finish()
}
@@ -195,11 +257,14 @@ impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
#[must_use = "this returns the intersection as an iterator, \
without modifying either input set"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a> {
inner: IntersectionInner<'a, T>,
pub struct Intersection<
'a,
T: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
inner: IntersectionInner<'a, T, A>,
}
#[derive(Debug)]
enum IntersectionInner<'a, T: 'a> {
enum IntersectionInner<'a, T: 'a, A: Allocator> {
Stitch {
// iterate similarly sized sets jointly, spotting matches along the way
a: Iter<'a, T>,
@@ -208,13 +273,30 @@ enum IntersectionInner<'a, T: 'a> {
Search {
// iterate a small set, look up in the large set
small_iter: Iter<'a, T>,
large_set: &'a BTreeSet<T>,
large_set: &'a BTreeSet<T, A>,
},
Answer(Option<&'a T>), // return a specific element or emptiness
}
// Explicit Debug impl necessary because of issue #26925
impl<T: Debug, A: Allocator> Debug for IntersectionInner<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
IntersectionInner::Stitch { a, b } => {
f.debug_struct("Stitch").field("a", a).field("b", b).finish()
}
IntersectionInner::Search { small_iter, large_set } => f
.debug_struct("Search")
.field("small_iter", small_iter)
.field("large_set", large_set)
.finish(),
IntersectionInner::Answer(x) => f.debug_tuple("Answer").field(x).finish(),
}
}
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
impl<T: Debug, A: Allocator> Debug for Intersection<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Intersection").field(&self.inner).finish()
}
@@ -265,6 +347,26 @@ impl<T> BTreeSet<T> {
pub const fn new() -> BTreeSet<T> {
BTreeSet { map: BTreeMap::new() }
}
}
impl<T, A: Allocator> BTreeSet<T, A> {
/// Makes a new `BTreeSet` with a reasonable choice of B.
///
/// # Examples
///
/// ```
/// # #![allow(unused_mut)]
/// # #![feature(allocator_api)]
/// # #![feature(btreemap_alloc)]
/// use std::collections::BTreeSet;
/// use std::alloc::Global;
///
/// let mut set: BTreeSet<i32> = BTreeSet::new_in(Global);
/// ```
#[unstable(feature = "btreemap_alloc", issue = "32838")]
pub fn new_in(alloc: A) -> BTreeSet<T, A> {
BTreeSet { map: BTreeMap::new_in(alloc) }
}
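
Beyond the doc example, a sketch of what the new parameter enables on nightly: a user-defined allocator plugged into `BTreeSet::new_in`. The `Counting` type is invented for this illustration, and the unstable `allocator_api` surface may shift:

    #![feature(allocator_api, btreemap_alloc)]
    use std::alloc::{AllocError, Allocator, Global, Layout};
    use std::collections::BTreeSet;
    use std::ptr::NonNull;
    use std::sync::atomic::{AtomicUsize, Ordering};

    // Counts allocations, delegating the real work to Global.
    struct Counting(AtomicUsize);

    unsafe impl Allocator for Counting {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            self.0.fetch_add(1, Ordering::Relaxed);
            Global.allocate(layout)
        }
        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            Global.deallocate(ptr, layout)
        }
    }

    fn main() {
        let alloc = Counting(AtomicUsize::new(0));
        let mut set = BTreeSet::new_in(&alloc);
        set.insert(1);
        set.insert(2);
        assert!(alloc.0.load(Ordering::Relaxed) >= 1); // at least the root node
    }
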
/// Constructs a double-ended iterator over a sub-range of elements in the set.
/// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
@@ -319,7 +421,7 @@ impl<T> BTreeSet<T> {
/// assert_eq!(diff, [1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T>
pub fn difference<'a>(&'a self, other: &'a BTreeSet<T, A>) -> Difference<'a, T, A>
where
T: Ord,
{
@@ -380,7 +482,10 @@ impl<T> BTreeSet<T> {
/// assert_eq!(sym_diff, [1, 3]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn symmetric_difference<'a>(&'a self, other: &'a BTreeSet<T>) -> SymmetricDifference<'a, T>
pub fn symmetric_difference<'a>(
&'a self,
other: &'a BTreeSet<T, A>,
) -> SymmetricDifference<'a, T>
where
T: Ord,
{
@@ -408,7 +513,7 @@ impl<T> BTreeSet<T> {
/// assert_eq!(intersection, [2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T>
pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T, A>) -> Intersection<'a, T, A>
where
T: Ord,
{
@@ -459,7 +564,7 @@ impl<T> BTreeSet<T> {
/// assert_eq!(union, [1, 2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T>
pub fn union<'a>(&'a self, other: &'a BTreeSet<T, A>) -> Union<'a, T>
where
T: Ord,
{
@@ -479,7 +584,10 @@ impl<T> BTreeSet<T> {
/// assert!(v.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
pub fn clear(&mut self)
where
A: Clone,
{
self.map.clear()
}
@@ -551,7 +659,7 @@ impl<T> BTreeSet<T> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool
pub fn is_disjoint(&self, other: &BTreeSet<T, A>) -> bool
where
T: Ord,
{
@@ -577,7 +685,7 @@ impl<T> BTreeSet<T> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_subset(&self, other: &BTreeSet<T>) -> bool
pub fn is_subset(&self, other: &BTreeSet<T, A>) -> bool
where
T: Ord,
{
@@ -657,7 +765,7 @@ impl<T> BTreeSet<T> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_superset(&self, other: &BTreeSet<T>) -> bool
pub fn is_superset(&self, other: &BTreeSet<T, A>) -> bool
where
T: Ord,
{
@@ -931,6 +1039,7 @@ impl<T> BTreeSet<T> {
pub fn append(&mut self, other: &mut Self)
where
T: Ord,
A: Clone,
{
self.map.append(&mut other.map);
}
@@ -968,6 +1077,7 @@ impl<T> BTreeSet<T> {
pub fn split_off<Q: ?Sized + Ord>(&mut self, value: &Q) -> Self
where
T: Borrow<Q> + Ord,
A: Clone,
{
BTreeSet { map: self.map.split_off(value) }
}
@@ -1002,12 +1112,13 @@ impl<T> BTreeSet<T> {
/// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
/// ```
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F>
pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F, A>
where
T: Ord,
F: 'a + FnMut(&T) -> bool,
{
DrainFilter { pred, inner: self.map.drain_filter_inner() }
let (inner, alloc) = self.map.drain_filter_inner();
DrainFilter { pred, inner, alloc }
}
/// Gets an iterator that visits the elements in the `BTreeSet` in ascending
@@ -1093,14 +1204,14 @@ impl<T: Ord> FromIterator<T> for BTreeSet<T> {
// use stable sort to preserve the insertion order.
inputs.sort();
BTreeSet::from_sorted_iter(inputs.into_iter())
BTreeSet::from_sorted_iter(inputs.into_iter(), Global)
}
}
impl<T: Ord> BTreeSet<T> {
fn from_sorted_iter<I: Iterator<Item = T>>(iter: I) -> BTreeSet<T> {
impl<T: Ord, A: Allocator> BTreeSet<T, A> {
fn from_sorted_iter<I: Iterator<Item = T>>(iter: I, alloc: A) -> BTreeSet<T, A> {
let iter = iter.map(|k| (k, ()));
let map = BTreeMap::bulk_build_from_sorted_iter(iter);
let map = BTreeMap::bulk_build_from_sorted_iter(iter, alloc);
BTreeSet { map }
}
}
@@ -1124,15 +1235,15 @@ impl<T: Ord, const N: usize> From<[T; N]> for BTreeSet<T> {
// use stable sort to preserve the insertion order.
arr.sort();
let iter = IntoIterator::into_iter(arr).map(|k| (k, ()));
let map = BTreeMap::bulk_build_from_sorted_iter(iter);
let map = BTreeMap::bulk_build_from_sorted_iter(iter, Global);
BTreeSet { map }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for BTreeSet<T> {
impl<T, A: Allocator> IntoIterator for BTreeSet<T, A> {
type Item = T;
type IntoIter = IntoIter<T>;
type IntoIter = IntoIter<T, A>;
/// Gets an iterator for moving out the `BTreeSet`'s contents.
///
@@ -1146,13 +1257,13 @@ impl<T> IntoIterator for BTreeSet<T> {
/// let v: Vec<_> = set.into_iter().collect();
/// assert_eq!(v, [1, 2, 3, 4]);
/// ```
fn into_iter(self) -> IntoIter<T> {
fn into_iter(self) -> IntoIter<T, A> {
IntoIter { iter: self.map.into_iter() }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a BTreeSet<T> {
impl<'a, T, A: Allocator> IntoIterator for &'a BTreeSet<T, A> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
@@ -1163,17 +1274,22 @@ impl<'a, T> IntoIterator for &'a BTreeSet<T> {
/// An iterator produced by calling `drain_filter` on BTreeSet.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub struct DrainFilter<'a, T, F>
where
pub struct DrainFilter<
'a,
T,
F,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> where
T: 'a,
F: 'a + FnMut(&T) -> bool,
{
pred: F,
inner: super::map::DrainFilterInner<'a, T, ()>,
alloc: &'a A,
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F> Drop for DrainFilter<'_, T, F>
impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
where
F: FnMut(&T) -> bool,
{
@@ -1183,7 +1299,7 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F> fmt::Debug for DrainFilter<'_, T, F>
impl<T, F, A: Allocator> fmt::Debug for DrainFilter<'_, T, F, A>
where
T: fmt::Debug,
F: FnMut(&T) -> bool,
@@ -1194,7 +1310,7 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<'a, T, F> Iterator for DrainFilter<'_, T, F>
impl<'a, T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
where
F: 'a + FnMut(&T) -> bool,
{
@@ -1203,7 +1319,7 @@ where
fn next(&mut self) -> Option<T> {
let pred = &mut self.pred;
let mut mapped_pred = |k: &T, _v: &mut ()| pred(k);
self.inner.next(&mut mapped_pred).map(|(k, _)| k)
self.inner.next(&mut mapped_pred, &self.alloc).map(|(k, _)| k)
}
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -1212,10 +1328,10 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F> FusedIterator for DrainFilter<'_, T, F> where F: FnMut(&T) -> bool {}
impl<T, F, A: Allocator> FusedIterator for DrainFilter<'_, T, F, A> where F: FnMut(&T) -> bool {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Extend<T> for BTreeSet<T> {
impl<T: Ord, A: Allocator> Extend<T> for BTreeSet<T, A> {
#[inline]
fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
iter.into_iter().for_each(move |elem| {
@@ -1230,7 +1346,7 @@ impl<T: Ord> Extend<T> for BTreeSet<T> {
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BTreeSet<T, A> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
@@ -1250,8 +1366,8 @@ impl<T> Default for BTreeSet<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord + Clone> Sub<&BTreeSet<T>> for &BTreeSet<T> {
type Output = BTreeSet<T>;
impl<T: Ord + Clone, A: Allocator + Clone> Sub<&BTreeSet<T, A>> for &BTreeSet<T, A> {
type Output = BTreeSet<T, A>;
/// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
///
@@ -1266,14 +1382,17 @@ impl<T: Ord + Clone> Sub<&BTreeSet<T>> for &BTreeSet<T> {
/// let result = &a - &b;
/// assert_eq!(result, BTreeSet::from([1, 2]));
/// ```
fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
BTreeSet::from_sorted_iter(self.difference(rhs).cloned())
fn sub(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
BTreeSet::from_sorted_iter(
self.difference(rhs).cloned(),
ManuallyDrop::into_inner(self.map.alloc.clone()),
)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord + Clone> BitXor<&BTreeSet<T>> for &BTreeSet<T> {
type Output = BTreeSet<T>;
impl<T: Ord + Clone, A: Allocator + Clone> BitXor<&BTreeSet<T, A>> for &BTreeSet<T, A> {
type Output = BTreeSet<T, A>;
/// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
///
@@ -1288,14 +1407,17 @@ impl<T: Ord + Clone> BitXor<&BTreeSet<T>> for &BTreeSet<T> {
/// let result = &a ^ &b;
/// assert_eq!(result, BTreeSet::from([1, 4]));
/// ```
fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
BTreeSet::from_sorted_iter(self.symmetric_difference(rhs).cloned())
fn bitxor(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
BTreeSet::from_sorted_iter(
self.symmetric_difference(rhs).cloned(),
ManuallyDrop::into_inner(self.map.alloc.clone()),
)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
type Output = BTreeSet<T>;
impl<T: Ord + Clone, A: Allocator + Clone> BitAnd<&BTreeSet<T, A>> for &BTreeSet<T, A> {
type Output = BTreeSet<T, A>;
/// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
///
@@ -1310,14 +1432,17 @@ impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
/// let result = &a & &b;
/// assert_eq!(result, BTreeSet::from([2, 3]));
/// ```
fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
BTreeSet::from_sorted_iter(self.intersection(rhs).cloned())
fn bitand(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
BTreeSet::from_sorted_iter(
self.intersection(rhs).cloned(),
ManuallyDrop::into_inner(self.map.alloc.clone()),
)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
type Output = BTreeSet<T>;
impl<T: Ord + Clone, A: Allocator + Clone> BitOr<&BTreeSet<T, A>> for &BTreeSet<T, A> {
type Output = BTreeSet<T, A>;
/// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
///
@@ -1332,13 +1457,16 @@ impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
/// let result = &a | &b;
/// assert_eq!(result, BTreeSet::from([1, 2, 3, 4, 5]));
/// ```
fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
BTreeSet::from_sorted_iter(self.union(rhs).cloned())
fn bitor(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
BTreeSet::from_sorted_iter(
self.union(rhs).cloned(),
ManuallyDrop::into_inner(self.map.alloc.clone()),
)
}
}
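
As a usage note for the operator impls above: each now builds its result with a clone of the left operand's allocator rather than assuming `Global`; with the default allocator the observable behavior is unchanged:

    use std::collections::BTreeSet;

    fn main() {
        let a = BTreeSet::from([1, 2, 3]);
        let b = BTreeSet::from([2, 3, 4]);
        // Each operator clones `a`'s allocator (here Global) for its result.
        assert_eq!(&a - &b, BTreeSet::from([1]));
        assert_eq!(&a ^ &b, BTreeSet::from([1, 4]));
        assert_eq!(&a & &b, BTreeSet::from([2, 3]));
        assert_eq!(&a | &b, BTreeSet::from([1, 2, 3, 4]));
    }
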
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug> Debug for BTreeSet<T> {
impl<T: Debug, A: Allocator> Debug for BTreeSet<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_set().entries(self.iter()).finish()
}
@@ -1391,7 +1519,7 @@ impl<T> ExactSizeIterator for Iter<'_, T> {
impl<T> FusedIterator for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
type Item = T;
fn next(&mut self) -> Option<T> {
@@ -1403,20 +1531,20 @@ impl<T> Iterator for IntoIter<T> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|(k, _)| k)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
fn len(&self) -> usize {
self.iter.len()
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
#[stable(feature = "btree_range", since = "1.17.0")]
impl<T> Clone for Range<'_, T> {
@@ -1457,7 +1585,7 @@ impl<'a, T> DoubleEndedIterator for Range<'a, T> {
impl<T> FusedIterator for Range<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Difference<'_, T> {
impl<T, A: Allocator + Clone> Clone for Difference<'_, T, A> {
fn clone(&self) -> Self {
Difference {
inner: match &self.inner {
@@ -1474,7 +1602,7 @@ impl<T> Clone for Difference<'_, T> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Difference<'a, T> {
impl<'a, T: Ord, A: Allocator> Iterator for Difference<'a, T, A> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
@@ -1521,7 +1649,7 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> {
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Difference<'_, T> {}
impl<T: Ord, A: Allocator> FusedIterator for Difference<'_, T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for SymmetricDifference<'_, T> {
@@ -1559,7 +1687,7 @@ impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Intersection<'_, T> {
impl<T, A: Allocator + Clone> Clone for Intersection<'_, T, A> {
fn clone(&self) -> Self {
Intersection {
inner: match &self.inner {
@@ -1575,7 +1703,7 @@ impl<T> Clone for Intersection<'_, T> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Intersection<'a, T> {
impl<'a, T: Ord, A: Allocator> Iterator for Intersection<'a, T, A> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
@@ -1616,7 +1744,7 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> {
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Intersection<'_, T> {}
impl<T: Ord, A: Allocator> FusedIterator for Intersection<'_, T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Union<'_, T> {

View file

@@ -1,5 +1,6 @@
use super::node::{ForceResult::*, Root};
use super::search::SearchResult::*;
use core::alloc::Allocator;
use core::borrow::Borrow;
impl<K, V> Root<K, V> {
@@ -28,12 +29,12 @@ impl<K, V> Root<K, V> {
/// and if the ordering of `Q` corresponds to that of `K`.
/// If `self` respects all `BTreeMap` tree invariants, then both
/// `self` and the returned tree will respect those invariants.
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
pub fn split_off<Q: ?Sized + Ord, A: Allocator>(&mut self, key: &Q, alloc: &A) -> Self
where
K: Borrow<Q>,
{
let left_root = self;
let mut right_root = Root::new_pillar(left_root.height());
let mut right_root = Root::new_pillar(left_root.height(), alloc);
let mut left_node = left_root.borrow_mut();
let mut right_node = right_root.borrow_mut();
@@ -56,16 +57,16 @@ impl<K, V> Root<K, V> {
}
}
left_root.fix_right_border();
right_root.fix_left_border();
left_root.fix_right_border(alloc);
right_root.fix_left_border(alloc);
right_root
}
/// Creates a tree consisting of empty nodes.
fn new_pillar(height: usize) -> Self {
let mut root = Root::new();
fn new_pillar<A: Allocator>(height: usize, alloc: &A) -> Self {
let mut root = Root::new(alloc);
for _ in 0..height {
root.push_internal_level();
root.push_internal_level(alloc);
}
root
}
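
To close, the overall shape of `split_off` on a flat sorted container, as a hedged sketch; the tree version additionally builds the right-hand pillar at matching height and repairs the borders it disturbed, which has no analogue here:

    // Everything >= key moves into the returned container, as with the tree.
    fn split_off_sorted(v: &mut Vec<i32>, key: i32) -> Vec<i32> {
        let idx = v.partition_point(|x| *x < key);
        v.split_off(idx)
    }

    fn main() {
        let mut left = vec![1, 3, 5, 7];
        let right = split_off_sorted(&mut left, 4);
        assert_eq!(left, [1, 3]);
        assert_eq!(right, [5, 7]);
    }
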