
Auto merge of #79319 - m-ou-se:rollup-d9n5viq, r=m-ou-se

Rollup of 10 pull requests

Successful merges:

 - #76941 (Add f{32,64}::is_subnormal)
 - #77697 (Split each iterator adapter and source into individual modules)
 - #78305 (Stabilize alloc::Layout const functions)
 - #78608 (Stabilize refcell_take)
 - #78793 (Clean up `StructuralEq` docs)
 - #79267 (BTreeMap: address namespace conflicts)
 - #79293 (Add test for eval order for a+=b)
 - #79295 (BTreeMap: fix minor testing mistakes in #78903)
 - #79297 (BTreeMap: swap the names of NodeRef::new and Root::new_leaf)
 - #79299 (Stabilise `then`)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
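For context, here is a small illustrative sketch (not part of this diff) of the user-facing APIs named by #76941, #78608, #78305, and #79299, assuming a toolchain on which all of them have landed and been stabilized:

```rust
use std::alloc::Layout;
use std::cell::RefCell;

fn main() {
    // #76941: `is_subnormal` classifies denormal floats.
    let tiny = f64::MIN_POSITIVE / 2.0;
    assert!(tiny.is_subnormal());

    // #78608: `RefCell::take` moves the value out, leaving `Default::default()`.
    let cell = RefCell::new(5);
    assert_eq!(cell.take(), 5);
    assert_eq!(*cell.borrow(), 0);

    // #78305: `Layout` accessors are callable in const contexts.
    const SIZE: usize = Layout::new::<u64>().size();
    assert_eq!(SIZE, 8);

    // #79299: `bool::then` turns a condition into an `Option`.
    assert_eq!(true.then(|| 2 + 2), Some(4));
    assert_eq!(false.then(|| 2 + 2), None);
}
```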
bors 2020-11-22 23:59:48 +00:00
commit 32da90b431
53 changed files with 3778 additions and 3551 deletions

View file

@@ -67,7 +67,7 @@ impl<K, V> Root<K, V> {
         // Push key-value pair and new right subtree.
         let tree_height = open_node.height() - 1;
-        let mut right_tree = Root::new_leaf();
+        let mut right_tree = Root::new();
         for _ in 0..tree_height {
             right_tree.push_internal_level();
         }

View file

@@ -9,7 +9,7 @@ use core::ops::{Index, RangeBounds};
 use core::ptr;
 use super::borrow::DormantMutRef;
-use super::node::{self, marker, ForceResult::*, Handle, NodeRef};
+use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
 use super::search::{self, SearchResult::*};
 use super::unwrap_unchecked;
@@ -128,7 +128,7 @@ pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
 /// ```
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct BTreeMap<K, V> {
-    root: Option<node::Root<K, V>>,
+    root: Option<Root<K, V>>,
     length: usize,
 }
@@ -145,7 +145,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
 impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
     fn clone(&self) -> BTreeMap<K, V> {
         fn clone_subtree<'a, K: Clone, V: Clone>(
-            node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+            node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
         ) -> BTreeMap<K, V>
         where
             K: 'a,
@@ -153,7 +153,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
         {
             match node.force() {
                 Leaf(leaf) => {
-                    let mut out_tree = BTreeMap { root: Some(node::Root::new_leaf()), length: 0 };
+                    let mut out_tree = BTreeMap { root: Some(Root::new()), length: 0 };
                     {
                         let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
@@ -198,7 +198,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
                             (root, length)
                         };
-                        out_node.push(k, v, subroot.unwrap_or_else(node::Root::new_leaf));
+                        out_node.push(k, v, subroot.unwrap_or_else(Root::new));
                         out_tree.length += 1 + sublength;
                     }
                 }
@@ -1558,7 +1558,7 @@ pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
     length: &'a mut usize,
     /// Buried reference to the root field in the borrowed map.
     /// Wrapped in `Option` to allow drop handler to `take` it.
-    dormant_root: Option<DormantMutRef<'a, node::Root<K, V>>>,
+    dormant_root: Option<DormantMutRef<'a, Root<K, V>>>,
     /// Contains a leaf edge preceding the next element to be returned, or the last leaf edge.
     /// Empty if the map has no root, if iteration went beyond the last leaf edge,
     /// or if a panic occurred in the predicate.
@@ -2160,8 +2160,8 @@ impl<K, V> BTreeMap<K, V> {
     /// If the root node is the empty (non-allocated) root node, allocate our
     /// own node. Is an associated function to avoid borrowing the entire BTreeMap.
-    fn ensure_is_owned(root: &mut Option<node::Root<K, V>>) -> &mut node::Root<K, V> {
-        root.get_or_insert_with(node::Root::new_leaf)
+    fn ensure_is_owned(root: &mut Option<Root<K, V>>) -> &mut Root<K, V> {
+        root.get_or_insert_with(Root::new)
     }
 }
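The `ensure_is_owned` pattern above works because `Option::get_or_insert_with` accepts any `FnOnce() -> T`, so the constructor can be passed by path (`Root::new`) instead of being wrapped in a closure. A minimal self-contained sketch of the same pattern, using a hypothetical stand-in type since the real `Root` is private to `alloc`:

```rust
// Hypothetical stand-in for the btree's owned root; not the real type.
struct Root {
    height: usize,
}

impl Root {
    fn new() -> Self {
        Root { height: 0 }
    }
}

// Mirrors `ensure_is_owned`: allocate the root lazily, on first use.
fn ensure_is_owned(root: &mut Option<Root>) -> &mut Root {
    // `Root::new` is a plain `fn() -> Root`, so it can be passed directly.
    root.get_or_insert_with(Root::new)
}

fn main() {
    let mut root: Option<Root> = None;
    assert_eq!(ensure_is_owned(&mut root).height, 0);
    assert!(root.is_some());
}
```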

View file

@@ -6,13 +6,14 @@ use crate::fmt::Debug;
 use crate::rc::Rc;
 use crate::string::{String, ToString};
 use crate::vec::Vec;
+use std::cmp::Ordering;
 use std::convert::TryFrom;
 use std::iter::{self, FromIterator};
 use std::mem;
 use std::ops::Bound::{self, Excluded, Included, Unbounded};
 use std::ops::RangeBounds;
 use std::panic::{catch_unwind, AssertUnwindSafe};
-use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

 mod ord_chaos;
 use ord_chaos::{Cyclic3, Governed, Governor};
@@ -56,24 +57,23 @@ impl<K, V> BTreeMap<K, V> {
             assert!(root_node.ascend().is_err());
             root_node.assert_back_pointers();
-            // Check consistenty of `length` and some of the navigation.
+            // Check consistency of `length` with what navigation code encounters.
             assert_eq!(self.length, root_node.calc_length());
-            assert_eq!(self.length, self.keys().count());
             // Lastly, check the invariant causing the least harm.
             root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
         } else {
-            // Check consistenty of `length` and some of the navigation.
             assert_eq!(self.length, 0);
-            assert_eq!(self.length, self.keys().count());
         }
+        // Check that `assert_strictly_ascending` will encounter all keys.
+        assert_eq!(self.length, self.keys().count());
     }

     // Panics if the map is corrupted or if the keys are not in strictly
     // ascending order, in the current opinion of the `Ord` implementation.
-    // If the `Ord` implementation does not honor transitivity, this method
-    // does not guarantee that all the keys are unique, just that adjacent
-    // keys are unique.
+    // If the `Ord` implementation violates transitivity, this method does not
+    // guarantee that all keys are unique, just that adjacent keys are unique.
     fn check(&self)
     where
         K: Debug + Ord,
@@ -879,6 +879,7 @@ mod test_drain_filter {
         map.check();
     }

+    // Explicitly consumes the iterator, where most test cases drop it instantly.
     #[test]
     fn consumed_keeping_all() {
         let pairs = (0..3).map(|i| (i, i));
@@ -887,6 +888,7 @@ mod test_drain_filter {
         map.check();
     }

+    // Explicitly consumes the iterator, where most test cases drop it instantly.
     #[test]
     fn consumed_removing_all() {
         let pairs = (0..3).map(|i| (i, i));
@@ -896,15 +898,7 @@ mod test_drain_filter {
         map.check();
     }

-    #[test]
-    fn dropped_removing_all() {
-        let pairs = (0..3).map(|i| (i, i));
-        let mut map: BTreeMap<_, _> = pairs.collect();
-        map.drain_filter(|_, _| true);
-        assert!(map.is_empty());
-        map.check();
-    }
-
+    // Explicitly consumes the iterator and modifies values through it.
     #[test]
     fn mutating_and_keeping() {
         let pairs = (0..3).map(|i| (i, i));
@@ -921,6 +915,7 @@ mod test_drain_filter {
         map.check();
     }

+    // Explicitly consumes the iterator and modifies values through it.
     #[test]
     fn mutating_and_removing() {
         let pairs = (0..3).map(|i| (i, i));
@@ -1094,7 +1089,7 @@ mod test_drain_filter {
         struct D;
         impl Drop for D {
             fn drop(&mut self) {
-                if DROPS.fetch_add(1, Ordering::SeqCst) == 1 {
+                if DROPS.fetch_add(1, SeqCst) == 1 {
                     panic!("panic in `drop`");
                 }
             }
@@ -1105,14 +1100,14 @@ mod test_drain_filter {
         catch_unwind(move || {
             drop(map.drain_filter(|i, _| {
-                PREDS.fetch_add(1usize << i, Ordering::SeqCst);
+                PREDS.fetch_add(1usize << i, SeqCst);
                 true
             }))
         })
         .unwrap_err();

-        assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
-        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
+        assert_eq!(PREDS.load(SeqCst), 0x011);
+        assert_eq!(DROPS.load(SeqCst), 3);
     }

     #[test]
@@ -1123,7 +1118,7 @@ mod test_drain_filter {
         struct D;
         impl Drop for D {
             fn drop(&mut self) {
-                DROPS.fetch_add(1, Ordering::SeqCst);
+                DROPS.fetch_add(1, SeqCst);
             }
         }
@@ -1132,7 +1127,7 @@ mod test_drain_filter {
         catch_unwind(AssertUnwindSafe(|| {
             drop(map.drain_filter(|i, _| {
-                PREDS.fetch_add(1usize << i, Ordering::SeqCst);
+                PREDS.fetch_add(1usize << i, SeqCst);
                 match i {
                     0 => true,
                     _ => panic!(),
@@ -1141,8 +1136,8 @@ mod test_drain_filter {
         }))
         .unwrap_err();

-        assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
-        assert_eq!(DROPS.load(Ordering::SeqCst), 1);
+        assert_eq!(PREDS.load(SeqCst), 0x011);
+        assert_eq!(DROPS.load(SeqCst), 1);
         assert_eq!(map.len(), 2);
         assert_eq!(map.first_entry().unwrap().key(), &4);
         assert_eq!(map.last_entry().unwrap().key(), &8);
@@ -1158,7 +1153,7 @@ mod test_drain_filter {
         struct D;
         impl Drop for D {
             fn drop(&mut self) {
-                DROPS.fetch_add(1, Ordering::SeqCst);
+                DROPS.fetch_add(1, SeqCst);
             }
         }
@@ -1167,7 +1162,7 @@ mod test_drain_filter {
         {
             let mut it = map.drain_filter(|i, _| {
-                PREDS.fetch_add(1usize << i, Ordering::SeqCst);
+                PREDS.fetch_add(1usize << i, SeqCst);
                 match i {
                     0 => true,
                     _ => panic!(),
@@ -1180,8 +1175,8 @@ mod test_drain_filter {
             assert!(matches!(result, Ok(None)));
         }

-        assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
-        assert_eq!(DROPS.load(Ordering::SeqCst), 1);
+        assert_eq!(PREDS.load(SeqCst), 0x011);
+        assert_eq!(DROPS.load(SeqCst), 1);
         assert_eq!(map.len(), 2);
         assert_eq!(map.first_entry().unwrap().key(), &4);
         assert_eq!(map.last_entry().unwrap().key(), &8);
@@ -1315,8 +1310,6 @@ fn test_zst() {
 // undefined.
 #[test]
 fn test_bad_zst() {
-    use std::cmp::Ordering;
-
     #[derive(Clone, Copy, Debug)]
     struct Bad;
@@ -1763,7 +1756,7 @@ fn test_append_drop_leak() {
     impl Drop for D {
         fn drop(&mut self) {
-            if DROPS.fetch_add(1, Ordering::SeqCst) == 0 {
+            if DROPS.fetch_add(1, SeqCst) == 0 {
                 panic!("panic in `drop`");
             }
         }
@@ -1779,7 +1772,7 @@ fn test_append_drop_leak() {
     catch_unwind(move || left.append(&mut right)).unwrap_err();

-    assert_eq!(DROPS.load(Ordering::SeqCst), 4); // Rust issue #47949 ate one little piggy
+    assert_eq!(DROPS.load(SeqCst), 4); // Rust issue #47949 ate one little piggy
 }

 #[test]
@@ -1894,7 +1887,7 @@ fn test_into_iter_drop_leak_height_0() {
     impl Drop for D {
         fn drop(&mut self) {
-            if DROPS.fetch_add(1, Ordering::SeqCst) == 3 {
+            if DROPS.fetch_add(1, SeqCst) == 3 {
                 panic!("panic in `drop`");
             }
         }
@@ -1909,7 +1902,7 @@ fn test_into_iter_drop_leak_height_0() {
     catch_unwind(move || drop(map.into_iter())).unwrap_err();

-    assert_eq!(DROPS.load(Ordering::SeqCst), 5);
+    assert_eq!(DROPS.load(SeqCst), 5);
 }

 #[test]
@@ -1921,18 +1914,18 @@ fn test_into_iter_drop_leak_height_1() {
     struct D;
     impl Drop for D {
         fn drop(&mut self) {
-            if DROPS.fetch_add(1, Ordering::SeqCst) == PANIC_POINT.load(Ordering::SeqCst) {
+            if DROPS.fetch_add(1, SeqCst) == PANIC_POINT.load(SeqCst) {
                 panic!("panic in `drop`");
             }
         }
     }

     for panic_point in vec![0, 1, size - 2, size - 1] {
-        DROPS.store(0, Ordering::SeqCst);
-        PANIC_POINT.store(panic_point, Ordering::SeqCst);
+        DROPS.store(0, SeqCst);
+        PANIC_POINT.store(panic_point, SeqCst);
         let map: BTreeMap<_, _> = (0..size).map(|i| (i, D)).collect();
         catch_unwind(move || drop(map.into_iter())).unwrap_err();
-        assert_eq!(DROPS.load(Ordering::SeqCst), size);
+        assert_eq!(DROPS.load(SeqCst), size);
     }
 }
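All of the leak tests in this file follow one recipe: count destructor runs in an atomic, panic from `Drop` at a chosen point, catch the unwind, and assert that every element was still dropped. A freestanding sketch of that recipe, using `Vec` in place of `BTreeMap` and assuming (as these tests do for `BTreeMap`) that the container keeps dropping the remaining elements after one destructor panics:

```rust
use std::panic::catch_unwind;
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

static DROPS: AtomicUsize = AtomicUsize::new(0);

struct D;
impl Drop for D {
    fn drop(&mut self) {
        // Panic while the second element is being dropped.
        if DROPS.fetch_add(1, SeqCst) == 1 {
            panic!("panic in `drop`");
        }
    }
}

fn main() {
    let v = vec![D, D, D];
    // The unwind is caught; the third element must still have been
    // dropped on the way out, or it would leak.
    catch_unwind(move || drop(v)).unwrap_err();
    assert_eq!(DROPS.load(SeqCst), 3);
}
```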

View file

@@ -2,6 +2,7 @@ use std::cell::Cell;
 use std::cmp::Ordering::{self, *};
 use std::ptr;

+// Minimal type with an `Ord` implementation violating transitivity.
 #[derive(Debug)]
 pub enum Cyclic3 {
     A,
@@ -34,6 +35,7 @@ impl PartialEq for Cyclic3 {
 impl Eq for Cyclic3 {}

+// Controls the ordering of values wrapped by `Governed`.
 #[derive(Debug)]
 pub struct Governor {
     flipped: Cell<bool>,
@@ -49,6 +51,9 @@ impl Governor {
     }
 }

+// Type with an `Ord` implementation that forms a total order at any moment
+// (assuming that `T` respects total order), but can suddenly be made to invert
+// that total order.
 #[derive(Debug)]
 pub struct Governed<'a, T>(pub T, pub &'a Governor);
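The comment added to `Cyclic3` describes the core trick of `ord_chaos`: an `Ord` whose pairwise answers look reasonable but cycle globally, so no total order exists. The implementation itself is outside this hunk; a sketch of the idea (the details below are assumed, not taken from the diff):

```rust
use std::cmp::Ordering;

// A comparison that is locally consistent but globally cyclic:
// A < B and B < C, yet C < A, so transitivity fails.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum Cyclic3 {
    A,
    B,
    C,
}

impl PartialOrd for Cyclic3 {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Cyclic3 {
    fn cmp(&self, other: &Self) -> Ordering {
        use Cyclic3::*;
        use Ordering::*;
        match (self, other) {
            (A, A) | (B, B) | (C, C) => Equal,
            (A, B) | (B, C) | (C, A) => Less,
            (A, C) | (B, A) | (C, B) => Greater,
        }
    }
}

fn main() {
    use Cyclic3::*;
    // Each comparison alone looks fine; together they form a cycle.
    assert!(A < B && B < C && C < A);
}
```

Feeding such keys into a `BTreeMap` exercises exactly the caveat in the `check` comment above: only adjacent keys can be asserted unique.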

View file

@@ -134,13 +134,13 @@ pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
 impl<K, V> Root<K, V> {
     /// Returns a new owned tree, with its own root node that is initially empty.
-    pub fn new_leaf() -> Self {
-        NodeRef::new().forget_type()
+    pub fn new() -> Self {
+        NodeRef::new_leaf().forget_type()
     }
 }

 impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
-    fn new() -> Self {
+    fn new_leaf() -> Self {
         Self::from_new_leaf(Box::new(unsafe { LeafNode::new() }))
     }
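The swap makes the names match the types: `Root<K, V>` is an alias for `NodeRef<marker::Owned, K, V, marker::LeafOrInternal>`, so the public constructor is plain `new`, while the function that is statically known to produce a leaf lives on the `marker::Leaf` variant as `new_leaf`. A toy model of this marker-type pattern (hypothetical types only; the real `NodeRef` carries raw pointers and two more parameters):

```rust
use std::marker::PhantomData;

// Marker types standing in for the node-type parameter.
struct Leaf;
struct LeafOrInternal;

// The last type parameter only records what we statically know
// about the node; it stores no data at runtime.
struct NodeRef<Type> {
    height: usize,
    _marker: PhantomData<Type>,
}

impl NodeRef<Leaf> {
    // Analogue of `NodeRef::new_leaf`: build a node known to be a leaf.
    fn new_leaf() -> Self {
        NodeRef { height: 0, _marker: PhantomData }
    }

    // Analogue of `forget_type`: erase the static "leaf" knowledge.
    fn forget_type(self) -> NodeRef<LeafOrInternal> {
        NodeRef { height: self.height, _marker: PhantomData }
    }
}

// Analogue of `Root::new`: an owned tree always starts as a leaf,
// but its root is typed "leaf or internal" from the outside.
fn new_root() -> NodeRef<LeafOrInternal> {
    NodeRef::new_leaf().forget_type()
}

fn main() {
    assert_eq!(new_root().height, 0);
}
```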

View file

@@ -74,12 +74,12 @@ fn test_splitpoint() {
 #[test]
 fn test_partial_cmp_eq() {
-    let mut root1 = NodeRef::new();
+    let mut root1 = NodeRef::new_leaf();
     let mut leaf1 = root1.borrow_mut();
     leaf1.push(1, ());
     let mut root1 = root1.forget_type();
     root1.push_internal_level();
-    let root2 = Root::new_leaf();
+    let root2 = Root::new();
     root1.reborrow().assert_back_pointers();
     root2.reborrow().assert_back_pointers();

View file

@@ -50,7 +50,7 @@ where
 {
     match search_linear(&node, key) {
         (idx, true) => Found(unsafe { Handle::new_kv(node, idx) }),
-        (idx, false) => SearchResult::GoDown(unsafe { Handle::new_edge(node, idx) }),
+        (idx, false) => GoDown(unsafe { Handle::new_edge(node, idx) }),
     }
 }

View file

@@ -1,9 +1,10 @@
 use super::super::DeterministicRng;
 use super::*;
 use crate::vec::Vec;
+use std::cmp::Ordering;
 use std::iter::FromIterator;
 use std::panic::{catch_unwind, AssertUnwindSafe};
-use std::sync::atomic::{AtomicU32, Ordering};
+use std::sync::atomic::{AtomicU32, Ordering::SeqCst};

 #[test]
 fn test_clone_eq() {
@@ -355,7 +356,7 @@ fn test_drain_filter_drop_panic_leak() {
     struct D(i32);
     impl Drop for D {
         fn drop(&mut self) {
-            if DROPS.fetch_add(1, Ordering::SeqCst) == 1 {
+            if DROPS.fetch_add(1, SeqCst) == 1 {
                 panic!("panic in `drop`");
             }
         }
@@ -368,14 +369,14 @@ fn test_drain_filter_drop_panic_leak() {
     catch_unwind(move || {
         drop(set.drain_filter(|d| {
-            PREDS.fetch_add(1u32 << d.0, Ordering::SeqCst);
+            PREDS.fetch_add(1u32 << d.0, SeqCst);
             true
         }))
     })
     .ok();

-    assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
-    assert_eq!(DROPS.load(Ordering::SeqCst), 3);
+    assert_eq!(PREDS.load(SeqCst), 0x011);
+    assert_eq!(DROPS.load(SeqCst), 3);
 }

 #[test]
@@ -387,7 +388,7 @@ fn test_drain_filter_pred_panic_leak() {
     struct D(i32);
     impl Drop for D {
         fn drop(&mut self) {
-            DROPS.fetch_add(1, Ordering::SeqCst);
+            DROPS.fetch_add(1, SeqCst);
         }
     }
@@ -398,7 +399,7 @@ fn test_drain_filter_pred_panic_leak() {
     catch_unwind(AssertUnwindSafe(|| {
         drop(set.drain_filter(|d| {
-            PREDS.fetch_add(1u32 << d.0, Ordering::SeqCst);
+            PREDS.fetch_add(1u32 << d.0, SeqCst);
             match d.0 {
                 0 => true,
                 _ => panic!(),
@@ -407,8 +408,8 @@ fn test_drain_filter_pred_panic_leak() {
     }))
     .ok();

-    assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
-    assert_eq!(DROPS.load(Ordering::SeqCst), 1);
+    assert_eq!(PREDS.load(SeqCst), 0x011);
+    assert_eq!(DROPS.load(SeqCst), 1);
     assert_eq!(set.len(), 2);
     assert_eq!(set.first().unwrap().0, 4);
     assert_eq!(set.last().unwrap().0, 8);
@@ -498,8 +499,6 @@ fn test_extend_ref() {
 #[test]
 fn test_recovery() {
-    use std::cmp::Ordering;
-
     #[derive(Debug)]
     struct Foo(&'static str, i32);