Auto merge of #70205 - Centril:rollup-0jq9k4s, r=Centril

Rollup of 16 pull requests

Successful merges:

 - #65097 (Make std::sync::Arc compatible with ThreadSanitizer)
 - #69033 (Use generator resume arguments in the async/await lowering)
 - #69997 (add `Option::{zip,zip_with}` methods under "option_zip" gate)
 - #70038 (Remove the call that makes miri fail)
 - #70058 (can_begin_literal_maybe_minus: `true` on `"-"? lit` NTs.)
 - #70111 (BTreeMap: remove shared root)
 - #70139 (add delay_span_bug to TransmuteSizeDiff, just to be sure)
 - #70165 (Remove the erase regions MIR transform)
 - #70166 (Derive PartialEq, Eq and Hash for RangeInclusive)
 - #70176 (Add tests for #58319 and #65131)
 - #70177 (Fix outdated comment for NamedRegionMap)
 - #70184 (expand_include: set `.directory` to dir of included file.)
 - #70187 (more clippy fixes)
 - #70188 (Clean up E0439 explanation)
 - #70189 (Abi::is_signed: assert that we are a Scalar)
 - #70194 (#[must_use] on split_off())

Failed merges:

r? @ghost
bors 2020-03-21 04:34:04 +00:00
commit 5f13820478
136 changed files with 1479 additions and 644 deletions

View file

@ -370,12 +370,17 @@ class RustStdBTreeSetPrinter(object):
("(len: %i)" % self.__val.get_wrapped_value()['map']['length']))
def children(self):
root = self.__val.get_wrapped_value()['map']['root']
node_ptr = root['node']
i = 0
for child in children_of_node(node_ptr, root['height'], False):
yield (str(i), child)
i = i + 1
prev_idx = None
innermap = GdbValue(self.__val.get_wrapped_value()['map'])
if innermap.get_wrapped_value()['length'] > 0:
root = GdbValue(innermap.get_wrapped_value()['root'])
type_name = str(root.type.ty.name).replace('core::option::Option<', '')[:-1]
root = root.get_wrapped_value().cast(gdb.lookup_type(type_name))
node_ptr = root['node']
i = 0
for child in children_of_node(node_ptr, root['height'], False):
yield (str(i), child)
i = i + 1
class RustStdBTreeMapPrinter(object):
@ -391,13 +396,16 @@ class RustStdBTreeMapPrinter(object):
("(len: %i)" % self.__val.get_wrapped_value()['length']))
def children(self):
root = self.__val.get_wrapped_value()['root']
node_ptr = root['node']
i = 0
for child in children_of_node(node_ptr, root['height'], True):
yield (str(i), child[0])
yield (str(i), child[1])
i = i + 1
if self.__val.get_wrapped_value()['length'] > 0:
root = GdbValue(self.__val.get_wrapped_value()['root'])
type_name = str(root.type.ty.name).replace('core::option::Option<', '')[:-1]
root = root.get_wrapped_value().cast(gdb.lookup_type(type_name))
node_ptr = root['node']
i = 0
for child in children_of_node(node_ptr, root['height'], True):
yield (str(i), child[0])
yield (str(i), child[1])
i = i + 1
class RustStdStringPrinter(object):
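The printer changes above mirror the new Rust-side layout: the map's root is now an `Option`, so an empty tree has no node to descend into, and the script must strip the `core::option::Option<...>` wrapper from the type name before casting. A minimal sketch of the shape involved (simplified stand-in types, not the real liballoc definitions):

```rust
use std::marker::PhantomData;

// Rough stand-ins for the liballoc types the GDB script walks.
struct Root<K, V> {
    node: *mut u8, // BoxedNode<K, V> in the real code
    height: usize,
    _marker: PhantomData<(K, V)>,
}

struct BTreeMap<K, V> {
    root: Option<Root<K, V>>, // `None` until the first insertion
    length: usize,
}
```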

View file

@ -122,7 +122,7 @@ use UnderflowResult::*;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct BTreeMap<K, V> {
root: node::Root<K, V>,
root: Option<node::Root<K, V>>,
length: usize,
}
@ -147,10 +147,11 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
{
match node.force() {
Leaf(leaf) => {
let mut out_tree = BTreeMap { root: node::Root::new_leaf(), length: 0 };
let mut out_tree = BTreeMap { root: Some(node::Root::new_leaf()), length: 0 };
{
let mut out_node = match out_tree.root.as_mut().force() {
let root = out_tree.root.as_mut().unwrap();
let mut out_node = match root.as_mut().force() {
Leaf(leaf) => leaf,
Internal(_) => unreachable!(),
};
@ -169,9 +170,14 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
}
Internal(internal) => {
let mut out_tree = clone_subtree(internal.first_edge().descend());
out_tree.ensure_root_is_owned();
{
let mut out_node = out_tree.root.push_level();
// Ideally we'd use the return of ensure_root_is_owned
// instead of re-unwrapping here but unfortunately that
// borrows all of out_tree and we need access to the
// length below.
let mut out_node = out_tree.root.as_mut().unwrap().push_level();
let mut in_edge = internal.first_edge();
while let Ok(kv) = in_edge.right_kv() {
let (k, v) = kv.into_kv();
@ -190,7 +196,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
(root, length)
};
out_node.push(k, v, subroot);
out_node.push(k, v, subroot.unwrap_or_else(|| node::Root::new_leaf()));
out_tree.length += 1 + sublength;
}
}
@ -203,9 +209,9 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
if self.is_empty() {
// Ideally we'd call `BTreeMap::new` here, but that has the `K:
// Ord` constraint, which this method lacks.
BTreeMap { root: node::Root::shared_empty_root(), length: 0 }
BTreeMap { root: None, length: 0 }
} else {
clone_subtree(self.root.as_ref())
clone_subtree(self.root.as_ref().unwrap().as_ref())
}
}
@ -271,14 +277,14 @@ where
type Key = K;
fn get(&self, key: &Q) -> Option<&K> {
match search::search_tree(self.root.as_ref(), key) {
match search::search_tree(self.root.as_ref()?.as_ref(), key) {
Found(handle) => Some(handle.into_kv().0),
GoDown(_) => None,
}
}
fn take(&mut self, key: &Q) -> Option<K> {
match search::search_tree(self.root.as_mut(), key) {
match search::search_tree(self.root.as_mut()?.as_mut(), key) {
Found(handle) => Some(
OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
.remove_kv()
@ -290,7 +296,7 @@ where
fn replace(&mut self, key: K) -> Option<K> {
self.ensure_root_is_owned();
match search::search_tree::<marker::Mut<'_>, K, (), K>(self.root.as_mut(), &key) {
match search::search_tree::<marker::Mut<'_>, K, (), K>(self.root.as_mut()?.as_mut(), &key) {
Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
GoDown(handle) => {
VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData }
@ -344,15 +350,18 @@ pub struct IterMut<'a, K: 'a, V: 'a> {
/// [`BTreeMap`]: struct.BTreeMap.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
front: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
back: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
front: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
back: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
length: usize,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let range = Range { front: self.front.reborrow(), back: self.back.reborrow() };
let range = Range {
front: self.front.as_ref().map(|f| f.reborrow()),
back: self.back.as_ref().map(|b| b.reborrow()),
};
f.debug_list().entries(range).finish()
}
}
@ -417,8 +426,8 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> {
/// [`BTreeMap`]: struct.BTreeMap.html
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, K: 'a, V: 'a> {
front: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
back: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
front: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
back: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
@ -437,8 +446,8 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
/// [`BTreeMap`]: struct.BTreeMap.html
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct RangeMut<'a, K: 'a, V: 'a> {
front: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
back: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
front: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
back: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
// Be invariant in `K` and `V`
_marker: PhantomData<&'a mut (K, V)>,
@ -447,7 +456,10 @@ pub struct RangeMut<'a, K: 'a, V: 'a> {
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let range = Range { front: self.front.reborrow(), back: self.back.reborrow() };
let range = Range {
front: self.front.as_ref().map(|f| f.reborrow()),
back: self.back.as_ref().map(|b| b.reborrow()),
};
f.debug_list().entries(range).finish()
}
}
@ -544,7 +556,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> BTreeMap<K, V> {
BTreeMap { root: node::Root::shared_empty_root(), length: 0 }
BTreeMap { root: None, length: 0 }
}
/// Clears the map, removing all elements.
@ -589,7 +601,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
K: Borrow<Q>,
Q: Ord,
{
match search::search_tree(self.root.as_ref(), key) {
match search::search_tree(self.root.as_ref()?.as_ref(), key) {
Found(handle) => Some(handle.into_kv().1),
GoDown(_) => None,
}
@ -616,7 +628,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
K: Borrow<Q>,
Q: Ord,
{
match search::search_tree(self.root.as_ref(), k) {
match search::search_tree(self.root.as_ref()?.as_ref(), k) {
Found(handle) => Some(handle.into_kv()),
GoDown(_) => None,
}
@ -645,7 +657,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
T: Ord,
K: Borrow<T>,
{
let front = self.root.as_ref().first_leaf_edge();
let front = self.root.as_ref()?.as_ref().first_leaf_edge();
front.right_kv().ok().map(Handle::into_kv)
}
@ -674,7 +686,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
T: Ord,
K: Borrow<T>,
{
let front = self.root.as_mut().first_leaf_edge();
let front = self.root.as_mut()?.as_mut().first_leaf_edge();
if let Ok(kv) = front.right_kv() {
Some(OccupiedEntry {
handle: kv.forget_node_type(),
@ -708,7 +720,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
T: Ord,
K: Borrow<T>,
{
let back = self.root.as_ref().last_leaf_edge();
let back = self.root.as_ref()?.as_ref().last_leaf_edge();
back.left_kv().ok().map(Handle::into_kv)
}
@ -737,7 +749,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
T: Ord,
K: Borrow<T>,
{
let back = self.root.as_mut().last_leaf_edge();
let back = self.root.as_mut()?.as_mut().last_leaf_edge();
if let Ok(kv) = back.left_kv() {
Some(OccupiedEntry {
handle: kv.forget_node_type(),
@ -801,7 +813,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
K: Borrow<Q>,
Q: Ord,
{
match search::search_tree(self.root.as_mut(), key) {
match search::search_tree(self.root.as_mut()?.as_mut(), key) {
Found(handle) => Some(handle.into_kv_mut().1),
GoDown(_) => None,
}
@ -896,7 +908,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
K: Borrow<Q>,
Q: Ord,
{
match search::search_tree(self.root.as_mut(), key) {
match search::search_tree(self.root.as_mut()?.as_mut(), key) {
Found(handle) => Some(
OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
.remove_entry(),
@ -992,11 +1004,15 @@ impl<K: Ord, V> BTreeMap<K, V> {
K: Borrow<T>,
R: RangeBounds<T>,
{
let root1 = self.root.as_ref();
let root2 = self.root.as_ref();
let (f, b) = range_search(root1, root2, range);
if let Some(root) = &self.root {
let root1 = root.as_ref();
let root2 = root.as_ref();
let (f, b) = range_search(root1, root2, range);
Range { front: f, back: b }
Range { front: Some(f), back: Some(b) }
} else {
Range { front: None, back: None }
}
}
/// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
@ -1036,11 +1052,15 @@ impl<K: Ord, V> BTreeMap<K, V> {
K: Borrow<T>,
R: RangeBounds<T>,
{
let root1 = self.root.as_mut();
let root2 = unsafe { ptr::read(&root1) };
let (f, b) = range_search(root1, root2, range);
if let Some(root) = &mut self.root {
let root1 = root.as_mut();
let root2 = unsafe { ptr::read(&root1) };
let (f, b) = range_search(root1, root2, range);
RangeMut { front: f, back: b, _marker: PhantomData }
RangeMut { front: Some(f), back: Some(b), _marker: PhantomData }
} else {
RangeMut { front: None, back: None, _marker: PhantomData }
}
}
/// Gets the given key's corresponding entry in the map for in-place manipulation.
@ -1065,7 +1085,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
// FIXME(@porglezomp) Avoid allocating if we don't insert
self.ensure_root_is_owned();
match search::search_tree(self.root.as_mut(), &key) {
match search::search_tree(self.root.as_mut().unwrap().as_mut(), &key) {
Found(handle) => {
Occupied(OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData })
}
@ -1077,7 +1097,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
self.ensure_root_is_owned();
let mut cur_node = self.root.as_mut().last_leaf_edge().into_node();
let mut cur_node = self.root.as_mut().unwrap().as_mut().last_leaf_edge().into_node();
// Iterate through all key-value pairs, pushing them into nodes at the right level.
for (key, value) in iter {
// Try to push key-value pair into the current leaf node.
@ -1126,7 +1146,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
fn fix_right_edge(&mut self) {
// Handle underfull nodes, start from the top.
let mut cur_node = self.root.as_mut();
let mut cur_node = self.root.as_mut().unwrap().as_mut();
while let Internal(internal) = cur_node.force() {
// Check if right-most child is underfull.
let mut last_edge = internal.last_edge();
@ -1187,14 +1207,14 @@ impl<K: Ord, V> BTreeMap<K, V> {
let total_num = self.len();
let mut right = Self::new();
right.root = node::Root::new_leaf();
for _ in 0..(self.root.as_ref().height()) {
right.root.push_level();
let right_root = right.ensure_root_is_owned();
for _ in 0..(self.root.as_ref().unwrap().as_ref().height()) {
right_root.push_level();
}
{
let mut left_node = self.root.as_mut();
let mut right_node = right.root.as_mut();
let mut left_node = self.root.as_mut().unwrap().as_mut();
let mut right_node = right.root.as_mut().unwrap().as_mut();
loop {
let mut split_edge = match search::search_node(left_node, key) {
@ -1223,7 +1243,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
self.fix_right_border();
right.fix_left_border();
if self.root.as_ref().height() < right.root.as_ref().height() {
if self.root.as_ref().unwrap().as_ref().height()
< right.root.as_ref().unwrap().as_ref().height()
{
self.recalc_length();
right.length = total_num - self.len();
} else {
@ -1261,19 +1283,19 @@ impl<K: Ord, V> BTreeMap<K, V> {
res
}
self.length = dfs(self.root.as_ref());
self.length = dfs(self.root.as_ref().unwrap().as_ref());
}
/// Removes empty levels on the top.
fn fix_top(&mut self) {
loop {
{
let node = self.root.as_ref();
let node = self.root.as_ref().unwrap().as_ref();
if node.height() == 0 || node.len() > 0 {
break;
}
}
self.root.pop_level();
self.root.as_mut().unwrap().pop_level();
}
}
@ -1281,7 +1303,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
self.fix_top();
{
let mut cur_node = self.root.as_mut();
let mut cur_node = self.root.as_mut().unwrap().as_mut();
while let Internal(node) = cur_node.force() {
let mut last_kv = node.last_kv();
@ -1307,7 +1329,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
self.fix_top();
{
let mut cur_node = self.root.as_mut();
let mut cur_node = self.root.as_mut().unwrap().as_mut();
while let Internal(node) = cur_node.force() {
let mut first_kv = node.first_kv();
@ -1326,13 +1348,6 @@ impl<K: Ord, V> BTreeMap<K, V> {
self.fix_top();
}
/// If the root node is the shared root node, allocate our own node.
fn ensure_root_is_owned(&mut self) {
if self.root.is_shared_root() {
self.root = node::Root::new_leaf();
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -1458,12 +1473,21 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> IntoIter<K, V> {
let root1 = unsafe { ptr::read(&self.root).into_ref() };
let root2 = unsafe { ptr::read(&self.root).into_ref() };
if self.root.is_none() {
mem::forget(self);
return IntoIter { front: None, back: None, length: 0 };
}
let root1 = unsafe { unwrap_unchecked(ptr::read(&self.root)).into_ref() };
let root2 = unsafe { unwrap_unchecked(ptr::read(&self.root)).into_ref() };
let len = self.length;
mem::forget(self);
IntoIter { front: root1.first_leaf_edge(), back: root2.last_leaf_edge(), length: len }
IntoIter {
front: Some(root1.first_leaf_edge()),
back: Some(root2.last_leaf_edge()),
length: len,
}
}
}
@ -1478,9 +1502,9 @@ impl<K, V> Drop for IntoIter<K, V> {
// don't have to care about panics this time (they'll abort).
while let Some(_) = self.0.next() {}
// No need to avoid the shared root, because the tree was definitely not empty.
unsafe {
let mut node = ptr::read(&self.0.front).into_node().forget_type();
let mut node =
unwrap_unchecked(ptr::read(&self.0.front)).into_node().forget_type();
while let Some(parent) = node.deallocate_and_ascend() {
node = parent.into_node().forget_type();
}
@ -1495,14 +1519,13 @@ impl<K, V> Drop for IntoIter<K, V> {
}
unsafe {
let mut node = ptr::read(&self.front).into_node().forget_type();
if node.is_shared_root() {
return;
}
// Most of the nodes have been deallocated while traversing
// but one pile from a leaf up to the root is left standing.
while let Some(parent) = node.deallocate_and_ascend() {
node = parent.into_node().forget_type();
if let Some(front) = ptr::read(&self.front) {
let mut node = front.into_node().forget_type();
// Most of the nodes have been deallocated while traversing
// but one pile from a leaf up to the root is left standing.
while let Some(parent) = node.deallocate_and_ascend() {
node = parent.into_node().forget_type();
}
}
}
}
@ -1517,7 +1540,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
None
} else {
self.length -= 1;
Some(unsafe { self.front.next_unchecked() })
Some(unsafe { self.front.as_mut().unwrap().next_unchecked() })
}
}
@ -1533,7 +1556,7 @@ impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
None
} else {
self.length -= 1;
Some(unsafe { self.back.next_back_unchecked() })
Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() })
}
}
}
@ -1683,7 +1706,7 @@ impl<'a, K, V> Range<'a, K, V> {
}
unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
self.front.next_unchecked()
unwrap_unchecked(self.front.as_mut()).next_unchecked()
}
}
@ -1696,7 +1719,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
impl<'a, K, V> Range<'a, K, V> {
unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
self.back.next_back_unchecked()
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
}
}
@ -1734,7 +1757,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
}
unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
self.front.next_unchecked()
unwrap_unchecked(self.front.as_mut()).next_unchecked()
}
}
@ -1755,7 +1778,7 @@ impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
impl<'a, K, V> RangeMut<'a, K, V> {
unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
self.back.next_back_unchecked()
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
}
}
@ -1969,8 +1992,8 @@ impl<K, V> BTreeMap<K, V> {
pub fn iter(&self) -> Iter<'_, K, V> {
Iter {
range: Range {
front: self.root.as_ref().first_leaf_edge(),
back: self.root.as_ref().last_leaf_edge(),
front: self.root.as_ref().map(|r| r.as_ref().first_leaf_edge()),
back: self.root.as_ref().map(|r| r.as_ref().last_leaf_edge()),
},
length: self.length,
}
@ -1999,13 +2022,17 @@ impl<K, V> BTreeMap<K, V> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
let root1 = self.root.as_mut();
let root2 = unsafe { ptr::read(&root1) };
IterMut {
range: RangeMut {
front: root1.first_leaf_edge(),
back: root2.last_leaf_edge(),
_marker: PhantomData,
range: if let Some(root) = &mut self.root {
let root1 = root.as_mut();
let root2 = unsafe { ptr::read(&root1) };
RangeMut {
front: Some(root1.first_leaf_edge()),
back: Some(root2.last_leaf_edge()),
_marker: PhantomData,
}
} else {
RangeMut { front: None, back: None, _marker: PhantomData }
},
length: self.length,
}
@ -2116,6 +2143,12 @@ impl<K, V> BTreeMap<K, V> {
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// If the root node is the empty (non-allocated) root node, allocate our
/// own node.
fn ensure_root_is_owned(&mut self) -> &mut node::Root<K, V> {
self.root.get_or_insert_with(|| node::Root::new_leaf())
}
}
impl<'a, K: Ord, V> Entry<'a, K, V> {
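The pattern running through this file: read paths propagate the missing root with `?` (`self.root.as_ref()?.as_ref()`), while write paths call the relocated `ensure_root_is_owned`, now a one-liner over `Option::get_or_insert_with`. A standalone sketch of the same idiom, with hypothetical `Tree`/`Node` types rather than the stdlib ones:

```rust
struct Node; // stand-in for node::Root<K, V>

impl Node {
    fn new_leaf() -> Node {
        Node
    }
    fn find(&self, _key: &str) -> Option<&str> {
        None // search elided
    }
}

struct Tree {
    root: Option<Node>,
}

impl Tree {
    // Read path: `?` turns "no root allocated" into "not found".
    fn get(&self, key: &str) -> Option<&str> {
        self.root.as_ref()?.find(key)
    }

    // Write path: allocate the root lazily, as ensure_root_is_owned does.
    fn ensure_root_is_owned(&mut self) -> &mut Node {
        self.root.get_or_insert_with(Node::new_leaf)
    }
}
```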

View file

@ -44,34 +44,7 @@ const B: usize = 6;
pub const MIN_LEN: usize = B - 1;
pub const CAPACITY: usize = 2 * B - 1;
/// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
/// these, since only the first `len` keys and values are assumed to be initialized. As such,
/// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
/// case.
///
/// We have a separate type for the header and rely on it matching the prefix of `LeafNode`, in
/// order to statically allocate a single dummy node to avoid allocations. This struct is
/// `repr(C)` to prevent them from being reordered. `LeafNode` does not just contain a
/// `NodeHeader` because we do not want unnecessary padding between `len` and the keys.
/// Crucially, `NodeHeader` can be safely transmuted to different K and V. (This is exploited
/// by `as_header`.)
#[repr(C)]
struct NodeHeader<K, V> {
/// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
/// This either points to an actual node or is null.
parent: *const InternalNode<K, V>,
/// This node's index into the parent node's `edges` array.
/// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
/// This is only guaranteed to be initialized when `parent` is non-null.
parent_idx: MaybeUninit<u16>,
/// The number of keys and values this node stores.
///
/// This is next to `parent_idx` to encourage the compiler to join `len` and
/// `parent_idx` into the same 32-bit word, reducing space overhead.
len: u16,
}
/// The underlying representation of leaf nodes.
#[repr(C)]
struct LeafNode<K, V> {
/// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
@ -111,21 +84,6 @@ impl<K, V> LeafNode<K, V> {
}
}
impl<K, V> NodeHeader<K, V> {
fn is_shared_root(&self) -> bool {
ptr::eq(self, &EMPTY_ROOT_NODE as *const _ as *const _)
}
}
// We need to implement Sync here in order to make a static instance.
unsafe impl Sync for NodeHeader<(), ()> {}
// An empty node used as a placeholder for the root node, to avoid allocations.
// We use just a header in order to save space, since no operation on an empty tree will
// ever take a pointer past the first key.
static EMPTY_ROOT_NODE: NodeHeader<(), ()> =
NodeHeader { parent: ptr::null(), parent_idx: MaybeUninit::uninit(), len: 0 };
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
@ -154,12 +112,9 @@ impl<K, V> InternalNode<K, V> {
}
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>`, an owned pointer to `InternalNode<K, V>`, or a (not owned)
/// pointer to `NodeHeader<(), ()>` (more specifically, the pointer to EMPTY_ROOT_NODE).
/// All of these types have a `NodeHeader<K, V>` prefix, meaning that they have at
/// least the same size as `NodeHeader<K, V>` and store the same kinds of data at the same
/// offsets; and they have a pointer alignment at least as large as `NodeHeader<K, V>`'s.
/// However, `BoxedNode` contains no information as to which of the three types
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// has no destructor.
struct BoxedNode<K, V> {
@ -184,8 +139,9 @@ impl<K, V> BoxedNode<K, V> {
}
}
/// Either an owned tree or a shared, empty tree. Note that this does not have a destructor,
/// and must be cleaned up manually if it is an owned tree.
/// An owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub struct Root<K, V> {
node: BoxedNode<K, V>,
/// The number of levels below the root node.
@ -196,20 +152,6 @@ unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
impl<K, V> Root<K, V> {
/// Whether the instance of `Root` wraps a shared, empty root node. If not,
/// the entire tree is uniquely owned by the owner of the `Root` instance.
pub fn is_shared_root(&self) -> bool {
self.as_ref().is_shared_root()
}
/// Returns a shared tree, wrapping a shared root node that is eternally empty.
pub fn shared_empty_root() -> Self {
Root {
node: unsafe { BoxedNode::from_ptr(NonNull::from(&EMPTY_ROOT_NODE).cast()) },
height: 0,
}
}
/// Returns a new owned tree, with its own root node that is initially empty.
pub fn new_leaf() -> Self {
Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
@ -245,7 +187,6 @@ impl<K, V> Root<K, V> {
/// Adds a new internal node with a single edge, pointing to the previous root, and make that
/// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
pub fn push_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
debug_assert!(!self.is_shared_root());
let mut new_node = Box::new(unsafe { InternalNode::new() });
new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
@ -308,11 +249,6 @@ impl<K, V> Root<K, V> {
/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
/// `NodeRef` could be pointing to either type of node.
/// Note that in case of a leaf node, this might still be the shared root!
/// Only turn this into a `LeafNode` reference if you know it is not the shared root!
/// Shared references must be dereferenceable *for the entire size of their pointee*,
/// so '&LeafNode` or `&InternalNode` pointing to the shared root is undefined behavior.
/// Turning this into a `NodeHeader` reference is always safe.
pub struct NodeRef<BorrowType, K, V, Type> {
/// The number of levels below the node.
height: usize,
@ -354,7 +290,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
/// Note that, despite being safe, calling this function can have the side effect
/// of invalidating mutable references that unsafe code has created.
pub fn len(&self) -> usize {
self.as_header().len as usize
self.as_leaf().len as usize
}
/// Returns the height of this node in the whole tree. Zero height denotes the
@ -374,35 +310,24 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
}
/// Exposes the leaf "portion" of any leaf or internal node that is not the shared root.
/// Exposes the leaf "portion" of any leaf or internal node.
/// If the node is a leaf, this function simply opens up its data.
/// If the node is an internal node, so not a leaf, it does have all the data a leaf has
/// (header, keys and values), and this function exposes that.
/// Unsafe because the node must not be the shared root. For more information,
/// see the `NodeRef` comments.
unsafe fn as_leaf(&self) -> &LeafNode<K, V> {
debug_assert!(!self.is_shared_root());
self.node.as_ref()
}
fn as_header(&self) -> &NodeHeader<K, V> {
unsafe { &*(self.node.as_ptr() as *const NodeHeader<K, V>) }
}
/// Returns whether the node is the shared, empty root.
pub fn is_shared_root(&self) -> bool {
self.as_header().is_shared_root()
fn as_leaf(&self) -> &LeafNode<K, V> {
// The node must be valid for at least the LeafNode portion.
// This is not a reference in the NodeRef type because we don't know if
// it should be unique or shared.
unsafe { self.node.as_ref() }
}
/// Borrows a view into the keys stored in the node.
/// Unsafe because the caller must ensure that the node is not the shared root.
pub unsafe fn keys(&self) -> &[K] {
pub fn keys(&self) -> &[K] {
self.reborrow().into_key_slice()
}
/// Borrows a view into the values stored in the node.
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn vals(&self) -> &[V] {
fn vals(&self) -> &[V] {
self.reborrow().into_val_slice()
}
@ -416,7 +341,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
pub fn ascend(
self,
) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
let parent_as_leaf = self.as_header().parent as *const LeafNode<K, V>;
let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
Ok(Handle {
node: NodeRef {
@ -425,7 +350,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
root: self.root,
_marker: PhantomData,
},
idx: unsafe { usize::from(*self.as_header().parent_idx.as_ptr()) },
idx: unsafe { usize::from(*self.as_leaf().parent_idx.as_ptr()) },
_marker: PhantomData,
})
} else {
@ -464,7 +389,6 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
pub unsafe fn deallocate_and_ascend(
self,
) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
assert!(!self.is_shared_root());
let height = self.height;
let node = self.node;
let ret = self.ascend().ok();
@ -507,41 +431,37 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
/// (header, keys and values), and this function exposes that.
///
/// Returns a raw ptr to avoid asserting exclusive access to the entire node.
/// This also implies you can invoke this member on the shared root, but the resulting pointer
/// might not be properly aligned and definitely would not allow accessing keys and values.
fn as_leaf_mut(&mut self) -> *mut LeafNode<K, V> {
self.node.as_ptr()
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn keys_mut(&mut self) -> &mut [K] {
self.reborrow_mut().into_key_slice_mut()
fn keys_mut(&mut self) -> &mut [K] {
// SAFETY: the caller will not be able to call further methods on self
// until the key slice reference is dropped, as we have unique access
// for the lifetime of the borrow.
unsafe { self.reborrow_mut().into_key_slice_mut() }
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn vals_mut(&mut self) -> &mut [V] {
self.reborrow_mut().into_val_slice_mut()
fn vals_mut(&mut self) -> &mut [V] {
// SAFETY: the caller will not be able to call further methods on self
// until the value slice reference is dropped, as we have unique access
// for the lifetime of the borrow.
unsafe { self.reborrow_mut().into_val_slice_mut() }
}
}
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_key_slice(self) -> &'a [K] {
debug_assert!(!self.is_shared_root());
// We cannot be the shared root, so `as_leaf` is okay.
slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len())
fn into_key_slice(self) -> &'a [K] {
unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len()) }
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_val_slice(self) -> &'a [V] {
debug_assert!(!self.is_shared_root());
// We cannot be the shared root, so `as_leaf` is okay.
slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len())
fn into_val_slice(self) -> &'a [V] {
unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) }
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_slices(self) -> (&'a [K], &'a [V]) {
let k = ptr::read(&self);
fn into_slices(self) -> (&'a [K], &'a [V]) {
// SAFETY: equivalent to reborrow() except not requiring Type: 'a
let k = unsafe { ptr::read(&self) };
(k.into_key_slice(), self.into_val_slice())
}
}
@ -553,28 +473,27 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
unsafe { &mut *(self.root as *mut Root<K, V>) }
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_key_slice_mut(mut self) -> &'a mut [K] {
debug_assert!(!self.is_shared_root());
// We cannot be the shared root, so `as_leaf_mut` is okay.
slice::from_raw_parts_mut(
MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
self.len(),
)
fn into_key_slice_mut(mut self) -> &'a mut [K] {
// SAFETY: The keys of a node must always be initialized up to length.
unsafe {
slice::from_raw_parts_mut(
MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
self.len(),
)
}
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_val_slice_mut(mut self) -> &'a mut [V] {
debug_assert!(!self.is_shared_root());
slice::from_raw_parts_mut(
MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
self.len(),
)
fn into_val_slice_mut(mut self) -> &'a mut [V] {
// SAFETY: The values of a node must always be initialized up to length.
unsafe {
slice::from_raw_parts_mut(
MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
self.len(),
)
}
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
debug_assert!(!self.is_shared_root());
fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
// We cannot use the getters here, because calling the second one
// invalidates the reference returned by the first.
// More precisely, it is the call to `len` that is the culprit,
@ -582,8 +501,13 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
// overlap with the keys (and even the values, for ZST keys).
let len = self.len();
let leaf = self.as_leaf_mut();
let keys = slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len);
let vals = slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len);
// SAFETY: The keys and values of a node must always be initialized up to length.
let keys = unsafe {
slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len)
};
let vals = unsafe {
slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len)
};
(keys, vals)
}
}
@ -592,7 +516,6 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
/// Adds a key/value pair the end of the node.
pub fn push(&mut self, key: K, val: V) {
assert!(self.len() < CAPACITY);
debug_assert!(!self.is_shared_root());
let idx = self.len();
@ -607,7 +530,6 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
/// Adds a key/value pair to the beginning of the node.
pub fn push_front(&mut self, key: K, val: V) {
assert!(self.len() < CAPACITY);
debug_assert!(!self.is_shared_root());
unsafe {
slice_insert(self.keys_mut(), 0, key);
@ -624,7 +546,6 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
assert!(edge.height == self.height - 1);
assert!(self.len() < CAPACITY);
debug_assert!(!self.is_shared_root());
let idx = self.len();
@ -658,7 +579,6 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
assert!(edge.height == self.height - 1);
assert!(self.len() < CAPACITY);
debug_assert!(!self.is_shared_root());
unsafe {
slice_insert(self.keys_mut(), 0, key);
@ -744,8 +664,7 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
}
}
/// Unsafe because the caller must ensure that the node is not the shared root.
unsafe fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
(self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
}
}
@ -904,7 +823,6 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
fn insert_fit(&mut self, key: K, val: V) -> *mut V {
// Necessary for correctness, but in a private module
debug_assert!(self.node.len() < CAPACITY);
debug_assert!(!self.node.is_shared_root());
unsafe {
slice_insert(self.node.keys_mut(), self.idx, key);
@ -1081,7 +999,6 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>
/// - All the key/value pairs to the right of this handle are put into a newly
/// allocated node.
pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
assert!(!self.node.is_shared_root());
unsafe {
let mut new_node = Box::new(LeafNode::new());
@ -1113,7 +1030,6 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>
pub fn remove(
mut self,
) -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
assert!(!self.node.is_shared_root());
unsafe {
let k = slice_remove(self.node.keys_mut(), self.idx);
let v = slice_remove(self.node.vals_mut(), self.idx);
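The SAFETY comments introduced above all rest on one invariant: only the first `len` keys and values of a node are initialized, so slices over them must be built with exactly that length. A self-contained toy version of the invariant (not the stdlib's `LeafNode`; dropping of initialized keys is omitted for brevity):

```rust
use std::mem::MaybeUninit;
use std::slice;

struct TinyNode<K> {
    keys: [MaybeUninit<K>; 4],
    len: u16,
}

impl<K> TinyNode<K> {
    fn new() -> Self {
        // An array of `MaybeUninit` requires no initialization itself.
        TinyNode { keys: unsafe { MaybeUninit::uninit().assume_init() }, len: 0 }
    }

    fn push(&mut self, key: K) {
        assert!((self.len as usize) < 4);
        self.keys[self.len as usize].write(key);
        self.len += 1;
    }

    fn keys(&self) -> &[K] {
        // SAFETY: exactly the first `len` keys were initialized by `push`.
        unsafe { slice::from_raw_parts(self.keys.as_ptr() as *const K, self.len as usize) }
    }
}
```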

View file

@ -67,19 +67,16 @@ where
Q: Ord,
K: Borrow<Q>,
{
// This function is defined over all borrow types (immutable, mutable, owned),
// and may be called on the shared root in each case.
// This function is defined over all borrow types (immutable, mutable, owned).
// Using `keys()` is fine here even if BorrowType is mutable, as all we return
// is an index -- not a reference.
let len = node.len();
if len > 0 {
let keys = unsafe { node.keys() }; // safe because a non-empty node cannot be the shared root
for (i, k) in keys.iter().enumerate() {
match key.cmp(k.borrow()) {
Ordering::Greater => {}
Ordering::Equal => return (i, true),
Ordering::Less => return (i, false),
}
let keys = node.keys();
for (i, k) in keys.iter().enumerate() {
match key.cmp(k.borrow()) {
Ordering::Greater => {}
Ordering::Equal => return (i, true),
Ordering::Less => return (i, false),
}
}
(len, false)
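With the shared root gone, `keys()` no longer needs an `unsafe` escape hatch or a `len > 0` guard; the body reduces to a plain linear scan over sorted keys, generic over the borrowed form of the key. A standalone equivalent of the resulting function:

```rust
use std::borrow::Borrow;
use std::cmp::Ordering;

/// Returns (index, found): the first index whose key is >= `key`,
/// and whether it is an exact match.
fn search_linear<Q, K>(keys: &[K], key: &Q) -> (usize, bool)
where
    Q: Ord + ?Sized,
    K: Borrow<Q>,
{
    for (i, k) in keys.iter().enumerate() {
        match key.cmp(k.borrow()) {
            Ordering::Greater => {}
            Ordering::Equal => return (i, true),
            Ordering::Less => return (i, false),
        }
    }
    (keys.len(), false)
}
```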

View file

@ -80,6 +80,7 @@
#![feature(box_into_raw_non_null)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(cfg_sanitize)]
#![feature(cfg_target_has_atomic)]
#![feature(coerce_unsized)]
#![feature(const_generic_impls_guard)]

View file

@ -40,6 +40,23 @@ mod tests;
/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
#[cfg(not(sanitize = "thread"))]
macro_rules! acquire {
($x:expr) => {
atomic::fence(Acquire)
};
}
// ThreadSanitizer does not support memory fences. To avoid false positive
// reports in Arc / Weak implementation use atomic loads for synchronization
// instead.
#[cfg(sanitize = "thread")]
macro_rules! acquire {
($x:expr) => {
$x.load(Acquire)
};
}
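The same trick in a self-contained sketch: the `Release` decrement pairs either with a fence or, under ThreadSanitizer (which does not model fences), with an `Acquire` load of the same counter. Note that `cfg(sanitize = "thread")` requires the unstable `cfg_sanitize` feature this diff enables in liballoc, so the sketch only builds on nightly:

```rust
#![feature(cfg_sanitize)] // nightly-only, as in the liballoc change above

use std::sync::atomic::{fence, AtomicUsize, Ordering};

#[cfg(not(sanitize = "thread"))]
macro_rules! acquire {
    ($x:expr) => {
        fence(Ordering::Acquire)
    };
}

// TSan sees atomic loads but not fences, so synchronize through the
// counter itself instead.
#[cfg(sanitize = "thread")]
macro_rules! acquire {
    ($x:expr) => {
        $x.load(Ordering::Acquire)
    };
}

fn release_ref(count: &AtomicUsize) {
    if count.fetch_sub(1, Ordering::Release) == 1 {
        // Last owner: make all prior writes visible before teardown.
        acquire!(count);
        // ... free the shared state here
    }
}
```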
/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
/// Reference Counted'.
///
@ -402,7 +419,7 @@ impl<T> Arc<T> {
return Err(this);
}
atomic::fence(Acquire);
acquire!(this.inner().strong);
unsafe {
let elem = ptr::read(&this.ptr.as_ref().data);
@ -739,7 +756,7 @@ impl<T: ?Sized> Arc<T> {
ptr::drop_in_place(&mut self.ptr.as_mut().data);
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
acquire!(self.inner().weak);
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
}
}
@ -1243,7 +1260,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
// [2]: (https://github.com/rust-lang/rust/pull/41714)
atomic::fence(Acquire);
acquire!(self.inner().strong);
unsafe {
self.drop_slow();
@ -1701,7 +1718,7 @@ impl<T: ?Sized> Drop for Weak<T> {
let inner = if let Some(inner) = self.inner() { inner } else { return };
if inner.weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
acquire!(inner.weak);
unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
}
}

View file

@ -67,7 +67,7 @@ fn test_basic_large() {
#[test]
fn test_basic_small() {
let mut map = BTreeMap::new();
// Empty, shared root:
// Empty, root is absent (None):
assert_eq!(map.remove(&1), None);
assert_eq!(map.len(), 0);
assert_eq!(map.get(&1), None);
@ -123,7 +123,7 @@ fn test_basic_small() {
assert_eq!(map.values().collect::<Vec<_>>(), vec![&4]);
assert_eq!(map.remove(&2), Some(4));
// Empty but private root:
// Empty but root is owned (Some(...)):
assert_eq!(map.len(), 0);
assert_eq!(map.get(&1), None);
assert_eq!(map.get_mut(&1), None);
@ -263,13 +263,6 @@ fn test_iter_mut_mutation() {
do_test_iter_mut_mutation::<Align32>(144);
}
#[test]
fn test_into_key_slice_with_shared_root_past_bounds() {
let mut map: BTreeMap<Align32, ()> = BTreeMap::new();
assert_eq!(map.get(&Align32(1)), None);
assert_eq!(map.get_mut(&Align32(1)), None);
}
#[test]
fn test_values_mut() {
let mut a = BTreeMap::new();

View file

@ -1377,6 +1377,7 @@ impl<T> Vec<T> {
/// assert_eq!(vec2, [2, 3]);
/// ```
#[inline]
#[must_use = "use `.truncate()` if you don't need the other half"]
#[stable(feature = "split_off", since = "1.4.0")]
pub fn split_off(&mut self, at: usize) -> Self {
assert!(at <= self.len(), "`at` out of bounds");
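With the attribute in place, silently discarding the returned second half now trips `unused_must_use`, and the message points callers at `truncate` when they only meant to shorten the vector:

```rust
let mut v = vec![1, 2, 3, 4];
let tail = v.split_off(2); // keep the returned second half
assert_eq!(v, [1, 2]);
assert_eq!(tail, [3, 4]);

let mut w = vec![1, 2, 3, 4];
// w.split_off(2);  // would now warn: unused return value of `split_off`
w.truncate(2); // the alternative the must_use message suggests
assert_eq!(w, [1, 2]);
```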

View file

@ -2,6 +2,84 @@
//! Asynchronous values.
#[cfg(not(bootstrap))]
use crate::{
ops::{Generator, GeneratorState},
pin::Pin,
ptr::NonNull,
task::{Context, Poll},
};
mod future;
#[stable(feature = "futures_api", since = "1.36.0")]
pub use self::future::Future;
/// This type is needed because:
///
/// a) Generators cannot implement `for<'a, 'b> Generator<&'a mut Context<'b>>`, so we need to pass
/// a raw pointer (see https://github.com/rust-lang/rust/issues/68923).
/// b) Raw pointers and `NonNull` aren't `Send` or `Sync`, so that would make every single future
/// non-Send/Sync as well, and we don't want that.
///
/// It also simplifies the HIR lowering of `.await`.
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[cfg(not(bootstrap))]
#[derive(Debug, Copy, Clone)]
pub struct ResumeTy(NonNull<Context<'static>>);
#[unstable(feature = "gen_future", issue = "50547")]
#[cfg(not(bootstrap))]
unsafe impl Send for ResumeTy {}
#[unstable(feature = "gen_future", issue = "50547")]
#[cfg(not(bootstrap))]
unsafe impl Sync for ResumeTy {}
/// Wrap a generator in a future.
///
/// This function returns a `GenFuture` underneath, but hides it in `impl Trait` to give
/// better error messages (`impl Future` rather than `GenFuture<[closure.....]>`).
// This is `const` to avoid extra errors after we recover from `const async fn`
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[cfg(not(bootstrap))]
#[inline]
pub const fn from_generator<T>(gen: T) -> impl Future<Output = T::Return>
where
T: Generator<ResumeTy, Yield = ()>,
{
struct GenFuture<T: Generator<ResumeTy, Yield = ()>>(T);
// We rely on the fact that async/await futures are immovable in order to create
// self-referential borrows in the underlying generator.
impl<T: Generator<ResumeTy, Yield = ()>> !Unpin for GenFuture<T> {}
impl<T: Generator<ResumeTy, Yield = ()>> Future for GenFuture<T> {
type Output = T::Return;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
// Safety: Safe because we're !Unpin + !Drop, and this is just a field projection.
let gen = unsafe { Pin::map_unchecked_mut(self, |s| &mut s.0) };
// Resume the generator, turning the `&mut Context` into a `NonNull` raw pointer. The
// `.await` lowering will safely cast that back to a `&mut Context`.
match gen.resume(ResumeTy(NonNull::from(cx).cast::<Context<'static>>())) {
GeneratorState::Yielded(()) => Poll::Pending,
GeneratorState::Complete(x) => Poll::Ready(x),
}
}
}
GenFuture(gen)
}
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[cfg(not(bootstrap))]
#[inline]
pub unsafe fn poll_with_context<F>(f: Pin<&mut F>, mut cx: ResumeTy) -> Poll<F::Output>
where
F: Future,
{
F::poll(f, cx.0.as_mut())
}

View file

@ -114,6 +114,6 @@ pub fn black_box<T>(dummy: T) -> T {
// more than we want, but it's so far good enough.
unsafe {
asm!("" : : "r"(&dummy));
return dummy;
dummy
}
}

View file

@ -140,6 +140,7 @@
#![feature(associated_type_bounds)]
#![feature(const_type_id)]
#![feature(const_caller_location)]
#![feature(option_zip)]
#![feature(no_niche)] // rust-lang/rust#68303
#[prelude_import]

View file

@ -1,5 +1,5 @@
use crate::fmt;
use crate::hash::{Hash, Hasher};
use crate::hash::Hash;
/// An unbounded range (`..`).
///
@ -330,7 +330,7 @@ impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// assert_eq!(arr[1..=3], [ 1,2,3 ]); // RangeInclusive
/// ```
#[doc(alias = "..=")]
#[derive(Clone)] // not Copy -- see #27186
#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub struct RangeInclusive<Idx> {
// Note that the fields here are not public to allow changing the
@ -350,26 +350,6 @@ pub struct RangeInclusive<Idx> {
pub(crate) exhausted: bool,
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: PartialEq> PartialEq for RangeInclusive<Idx> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.start == other.start && self.end == other.end && self.exhausted == other.exhausted
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: Eq> Eq for RangeInclusive<Idx> {}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: Hash> Hash for RangeInclusive<Idx> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.start.hash(state);
self.end.hash(state);
self.exhausted.hash(state);
}
}
impl<Idx> RangeInclusive<Idx> {
/// Creates a new inclusive range. Equivalent to writing `start..=end`.
///
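The derived impls match the removed hand-written ones field for field, including the `exhausted` flag, so a fully-iterated range still compares unequal to a fresh one; and with `Hash` derived, inclusive ranges can now serve as hash-set or hash-map keys. A quick check of both points:

```rust
use std::collections::HashSet;
use std::ops::RangeInclusive;

let mut set: HashSet<RangeInclusive<u32>> = HashSet::new();
set.insert(1..=5);
assert!(set.contains(&(1..=5)));

// `exhausted` participates in equality, as it did in the manual impl:
let mut r = 1..=1;
assert_eq!(r.next(), Some(1)); // consumes the only element
assert_ne!(r, 1..=1); // now exhausted, so no longer equal to a fresh range
```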

View file

@ -913,6 +913,63 @@ impl<T> Option<T> {
pub fn replace(&mut self, value: T) -> Option<T> {
mem::replace(self, Some(value))
}
/// Zips `self` with another `Option`.
///
/// If `self` is `Some(s)` and `other` is `Some(o)`, this method returns `Some((s, o))`.
/// Otherwise, `None` is returned.
///
/// # Examples
///
/// ```
/// #![feature(option_zip)]
/// let x = Some(1);
/// let y = Some("hi");
/// let z = None::<u8>;
///
/// assert_eq!(x.zip(y), Some((1, "hi")));
/// assert_eq!(x.zip(z), None);
/// ```
#[unstable(feature = "option_zip", issue = "70086")]
pub fn zip<U>(self, other: Option<U>) -> Option<(T, U)> {
self.zip_with(other, |a, b| (a, b))
}
/// Zips `self` and another `Option` with function `f`.
///
/// If `self` is `Some(s)` and `other` is `Some(o)`, this method returns `Some(f(s, o))`.
/// Otherwise, `None` is returned.
///
/// # Examples
///
/// ```
/// #![feature(option_zip)]
///
/// #[derive(Debug, PartialEq)]
/// struct Point {
/// x: f64,
/// y: f64,
/// }
///
/// impl Point {
/// fn new(x: f64, y: f64) -> Self {
/// Self { x, y }
/// }
/// }
///
/// let x = Some(17.5);
/// let y = Some(42.7);
///
/// assert_eq!(x.zip_with(y, Point::new), Some(Point { x: 17.5, y: 42.7 }));
/// assert_eq!(x.zip_with(None, Point::new), None);
/// ```
#[unstable(feature = "option_zip", issue = "70086")]
pub fn zip_with<U, F, R>(self, other: Option<U>, f: F) -> Option<R>
where
F: FnOnce(T, U) -> R,
{
Some(f(self?, other?))
}
}
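The entire `zip_with` body is that final line: `self?` and `other?` each return `None` early, so `f` only runs when both sides are `Some`. Desugared without `?`, the same function would read roughly like this (free-function sketch):

```rust
fn zip_with<T, U, R>(a: Option<T>, b: Option<U>, f: impl FnOnce(T, U) -> R) -> Option<R> {
    match (a, b) {
        (Some(x), Some(y)) => Some(f(x, y)),
        _ => None,
    }
}
```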
impl<T: Copy> Option<&T> {

View file

@ -129,21 +129,3 @@ fn test_discriminant_send_sync() {
is_send_sync::<Discriminant<Regular>>();
is_send_sync::<Discriminant<NotSendSync>>();
}
#[test]
fn test_const_forget() {
const _: () = forget(0i32);
const _: () = forget(Vec::<Vec<Box<i32>>>::new());
// Writing this function signature without const-forget
// triggers compiler errors:
// 1) That we use a non-const fn inside a const fn
// 2) without the forget, it complains about the destructor of Box
const fn const_forget_box<T>(x: Box<T>) {
forget(x);
}
// Call the forget_box at runtime,
// as we can't const-construct a box yet.
const_forget_box(Box::new(0i32));
}

View file

@ -85,7 +85,7 @@ impl LintLevelSets {
level = cmp::min(*driver_level, level);
}
return (level, src);
(level, src)
}
pub fn get_lint_id_level(

View file

@ -467,7 +467,7 @@ impl<'tcx> ScopeTree {
}
debug!("temporary_scope({:?}) = None", expr_id);
return None;
None
}
/// Returns the lifetime of the variable `id`.
@ -498,7 +498,7 @@ impl<'tcx> ScopeTree {
debug!("is_subscope_of({:?}, {:?})=true", subscope, superscope);
return true;
true
}
/// Returns the ID of the innermost containing body.

View file

@ -1447,11 +1447,11 @@ impl<'tcx> TyCtxt<'tcx> {
_ => return None,
};
return Some(FreeRegionInfo {
Some(FreeRegionInfo {
def_id: suitable_region_binding_scope,
boundregion: bound_region,
is_impl_item,
});
})
}
pub fn return_type_impl_trait(&self, scope_def_id: DefId) -> Option<(Ty<'tcx>, Span)> {

View file

@ -440,7 +440,7 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
(Some(sz_a_val), Some(sz_b_val)) => Err(TypeError::FixedArraySize(
expected_found(relation, &sz_a_val, &sz_b_val),
)),
_ => return Err(err),
_ => Err(err),
}
}
}

View file

@ -1612,7 +1612,7 @@ impl<'tcx> PolyExistentialProjection<'tcx> {
}
pub fn item_def_id(&self) -> DefId {
return self.skip_binder().item_def_id;
self.skip_binder().item_def_id
}
}
@ -2000,8 +2000,8 @@ impl<'tcx> TyS<'tcx> {
#[inline]
pub fn is_unsafe_ptr(&self) -> bool {
match self.kind {
RawPtr(_) => return true,
_ => return false,
RawPtr(_) => true,
_ => false,
}
}

View file

@ -524,7 +524,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
self.root_ty = None;
}
return t1;
t1
}
fn fold_const(&mut self, c: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {

View file

@ -424,7 +424,7 @@ impl Token {
NtExpr(..) | NtBlock(..) | NtLiteral(..) => true,
_ => false,
},
_ => self.can_begin_literal_or_bool(),
_ => self.can_begin_literal_maybe_minus(),
}
}
@ -448,13 +448,22 @@ impl Token {
/// Returns `true` if the token is any literal, a minus (which can prefix a literal,
/// for example `-42`), or one of the boolean idents.
///
/// Keep this in sync with `Lit::from_token`.
pub fn can_begin_literal_or_bool(&self) -> bool {
/// In other words, would this token be a valid start of `parse_literal_maybe_minus`?
///
/// Keep this in sync with `Lit::from_token`, excluding unary negation.
pub fn can_begin_literal_maybe_minus(&self) -> bool {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &**nt {
NtExpr(e) | NtLiteral(e) => matches!(e.kind, ast::ExprKind::Lit(_)),
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
ast::ExprKind::Lit(_) => true,
ast::ExprKind::Unary(ast::UnOp::Neg, e) => {
matches!(&e.kind, ast::ExprKind::Lit(_))
}
_ => false,
},
_ => false,
},
_ => false,
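In practice this lets an interpolated expression that is a negated literal flow into positions parsed with `parse_literal_maybe_minus`, such as a `literal` macro fragment; previously the nonterminal token was rejected up front. A small illustration of the behavior this enables (my own example, not a test from the PR):

```rust
macro_rules! inner {
    ($l:literal) => {
        $l
    };
}

macro_rules! outer {
    // `$e` arrives at `inner!` as an already-interpolated expression token.
    ($e:expr) => {
        inner!($e)
    };
}

fn main() {
    // `-42` inside the NT is now recognized as a `"-"? lit` start.
    assert_eq!(outer!(-42), -42);
}
```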

View file

@ -189,7 +189,7 @@ impl Lit {
/// Converts arbitrary token into an AST literal.
///
/// Keep this in sync with `Token::can_begin_literal_or_bool`.
/// Keep this in sync with `Token::can_begin_literal_maybe_minus` excluding unary negation.
pub fn from_token(token: &Token) -> Result<Lit, LitError> {
let lit = match token.uninterpolate().kind {
token::Ident(name, false) if name.is_bool_lit() => {

View file

@ -470,6 +470,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}
/// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
///
/// This results in:
///
/// ```text
/// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
/// <body>
/// })
/// ```
pub(super) fn make_async_expr(
&mut self,
capture_clause: CaptureBy,
@ -480,17 +489,42 @@ impl<'hir> LoweringContext<'_, 'hir> {
body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
) -> hir::ExprKind<'hir> {
let output = match ret_ty {
Some(ty) => FnRetTy::Ty(ty),
None => FnRetTy::Default(span),
Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
None => hir::FnRetTy::DefaultReturn(span),
};
let ast_decl = FnDecl { inputs: vec![], output };
let decl = self.lower_fn_decl(&ast_decl, None, /* impl trait allowed */ false, None);
let body_id = self.lower_fn_body(&ast_decl, |this| {
this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
body(this)
// Resume argument type. We let the compiler infer this to simplify the lowering. It is
// fully constrained by `future::from_generator`.
let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };
// The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
let decl = self.arena.alloc(hir::FnDecl {
inputs: arena_vec![self; input_ty],
output,
c_variadic: false,
implicit_self: hir::ImplicitSelfKind::None,
});
// `static || -> <ret_ty> { body }`:
// Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
let (pat, task_context_hid) = self.pat_ident_binding_mode(
span,
Ident::with_dummy_span(sym::_task_context),
hir::BindingAnnotation::Mutable,
);
let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, span };
let params = arena_vec![self; param];
let body_id = self.lower_body(move |this| {
this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
let old_ctx = this.task_context;
this.task_context = Some(task_context_hid);
let res = body(this);
this.task_context = old_ctx;
(params, res)
});
// `static |_task_context| -> <ret_ty> { body }`:
let generator_kind = hir::ExprKind::Closure(
capture_clause,
decl,
@ -523,13 +557,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
/// ```rust
/// match <expr> {
/// mut pinned => loop {
/// match ::std::future::poll_with_tls_context(unsafe {
/// <::std::pin::Pin>::new_unchecked(&mut pinned)
/// }) {
/// match unsafe { ::std::future::poll_with_context(
/// <::std::pin::Pin>::new_unchecked(&mut pinned),
/// task_context,
/// ) } {
/// ::std::task::Poll::Ready(result) => break result,
/// ::std::task::Poll::Pending => {}
/// }
/// yield ();
/// task_context = yield ();
/// }
/// }
/// ```
@ -561,12 +596,23 @@ impl<'hir> LoweringContext<'_, 'hir> {
let (pinned_pat, pinned_pat_hid) =
self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
// ::std::future::poll_with_tls_context(unsafe {
// ::std::pin::Pin::new_unchecked(&mut pinned)
// })`
let task_context_ident = Ident::with_dummy_span(sym::_task_context);
// unsafe {
// ::std::future::poll_with_context(
// ::std::pin::Pin::new_unchecked(&mut pinned),
// task_context,
// )
// }
let poll_expr = {
let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
let task_context = if let Some(task_context_hid) = self.task_context {
self.expr_ident_mut(span, task_context_ident, task_context_hid)
} else {
// Use of `await` outside of an async context; we cannot use `task_context` here.
self.expr_err(span)
};
let pin_ty_id = self.next_id();
let new_unchecked_expr_kind = self.expr_call_std_assoc_fn(
pin_ty_id,
@ -575,14 +621,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
"new_unchecked",
arena_vec![self; ref_mut_pinned],
);
let new_unchecked =
self.arena.alloc(self.expr(span, new_unchecked_expr_kind, ThinVec::new()));
let unsafe_expr = self.expr_unsafe(new_unchecked);
self.expr_call_std_path(
let new_unchecked = self.expr(span, new_unchecked_expr_kind, ThinVec::new());
let call = self.expr_call_std_path(
gen_future_span,
&[sym::future, sym::poll_with_tls_context],
arena_vec![self; unsafe_expr],
)
&[sym::future, sym::poll_with_context],
arena_vec![self; new_unchecked, task_context],
);
self.arena.alloc(self.expr_unsafe(call))
};
// `::std::task::Poll::Ready(result) => break result`
@ -622,6 +667,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
self.stmt_expr(span, match_expr)
};
// task_context = yield ();
let yield_stmt = {
let unit = self.expr_unit(span);
let yield_expr = self.expr(
@ -629,7 +675,18 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::ExprKind::Yield(unit, hir::YieldSource::Await),
ThinVec::new(),
);
self.stmt_expr(span, yield_expr)
let yield_expr = self.arena.alloc(yield_expr);
if let Some(task_context_hid) = self.task_context {
let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
let assign =
self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
self.stmt_expr(span, assign)
} else {
// Use of `await` outside of an async context. Return `yield_expr` so that we can
// proceed with type checking.
self.stmt(span, hir::StmtKind::Semi(yield_expr))
}
};
let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);

View file

@ -814,7 +814,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
/// Construct `ExprKind::Err` for the given `span`.
fn expr_err(&mut self, span: Span) -> hir::Expr<'hir> {
crate fn expr_err(&mut self, span: Span) -> hir::Expr<'hir> {
self.expr(span, hir::ExprKind::Err, AttrVec::new())
}
@ -960,7 +960,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
id
}
fn lower_body(
pub(super) fn lower_body(
&mut self,
f: impl FnOnce(&mut Self) -> (&'hir [hir::Param<'hir>], hir::Expr<'hir>),
) -> hir::BodyId {

View file

@ -116,6 +116,10 @@ struct LoweringContext<'a, 'hir: 'a> {
generator_kind: Option<hir::GeneratorKind>,
/// When inside an `async` context, this is the `HirId` of the
/// `task_context` local bound to the resume argument of the generator.
task_context: Option<hir::HirId>,
/// Used to get the current `fn`'s def span to point to when using `await`
/// outside of an `async fn`.
current_item: Option<Span>,
@ -294,6 +298,7 @@ pub fn lower_crate<'a, 'hir>(
item_local_id_counters: Default::default(),
node_id_to_hir_id: IndexVec::new(),
generator_kind: None,
task_context: None,
current_item: None,
lifetimes_to_define: Vec::new(),
is_collecting_in_band_lifetimes: false,

View file

@ -87,7 +87,7 @@ fn decodable_substructure(
let blkarg = cx.ident_of("_d", trait_span);
let blkdecoder = cx.expr_ident(trait_span, blkarg);
return match *substr.fields {
match *substr.fields {
StaticStruct(_, ref summary) => {
let nfields = match *summary {
Unnamed(ref fields, _) => fields.len(),
@ -178,7 +178,7 @@ fn decodable_substructure(
)
}
_ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"),
};
}
}
/// Creates a decoder for a single enum variant/struct:

View file

@ -53,7 +53,7 @@ fn default_substructure(
let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]);
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new());
return match *substr.fields {
match *substr.fields {
StaticStruct(_, ref summary) => match *summary {
Unnamed(ref fields, is_tuple) => {
if !is_tuple {
@ -83,5 +83,5 @@ fn default_substructure(
DummyResult::raw_expr(trait_span, true)
}
_ => cx.span_bug(trait_span, "method in `derive(Default)`"),
};
}
}

View file

@ -173,7 +173,7 @@ fn encodable_substructure(
],
));
return match *substr.fields {
match *substr.fields {
Struct(_, ref fields) => {
let emit_struct_field = cx.ident_of("emit_struct_field", trait_span);
let mut stmts = Vec::new();
@ -283,5 +283,5 @@ fn encodable_substructure(
}
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"),
};
}
}

View file

@ -489,7 +489,6 @@ impl<'a> TraitDef<'a> {
// set earlier; see
// librustc_expand/expand.rs:MacroExpander::fully_expand_fragment()
// librustc_expand/base.rs:Annotatable::derive_allowed()
return;
}
}
}

View file

@ -359,7 +359,7 @@ pub mod printf {
//
// Note: `move` used to capture copies of the cursors as they are *now*.
let fallback = move || {
return Some((
Some((
Substitution::Format(Format {
span: start.slice_between(next).unwrap(),
parameter: None,
@ -371,7 +371,7 @@ pub mod printf {
position: InnerSpan::new(start.at, next.at),
}),
next.slice_after(),
));
))
};
// Next parsing state.

View file

@ -4,6 +4,7 @@ use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast_pretty::pprust;
use rustc_expand::base::{self, *};
use rustc_expand::module::DirectoryOwnership;
use rustc_expand::panictry;
use rustc_parse::{self, new_sub_parser_from_file, parser::Parser};
use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
@ -11,6 +12,7 @@ use rustc_span::symbol::Symbol;
use rustc_span::{self, Pos, Span};
use smallvec::SmallVec;
use std::rc::Rc;
use rustc_data_structures::sync::Lrc;
@ -101,7 +103,7 @@ pub fn expand_include<'cx>(
None => return DummyResult::any(sp),
};
// The file will be added to the code map by the parser
let file = match cx.resolve_path(file, sp) {
let mut file = match cx.resolve_path(file, sp) {
Ok(f) => f,
Err(mut err) => {
err.emit();
@ -110,6 +112,15 @@ pub fn expand_include<'cx>(
};
let p = new_sub_parser_from_file(cx.parse_sess(), &file, None, sp);
// If in the included file we have e.g., `mod bar;`,
// then the path of `bar.rs` should be relative to the directory of `file`.
// See https://github.com/rust-lang/rust/pull/69838/files#r395217057 for a discussion.
// `MacroExpander::fully_expand_fragment` later restores, so "stack discipline" is maintained.
file.pop();
cx.current_expansion.directory_ownership = DirectoryOwnership::Owned { relative: None };
let mod_path = cx.current_expansion.module.mod_path.clone();
cx.current_expansion.module = Rc::new(ModuleData { mod_path, directory: file });
struct ExpandResult<'a> {
p: Parser<'a>,
}
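To see what the new `.directory` value buys, consider a hypothetical layout (file names invented for illustration). With this change, a `mod` item inside an included file is looked up relative to the included file's directory:

```rust
// src/sub/included.rs contains:  mod bar;
// src/sub/bar.rs contains:       pub fn hello() -> &'static str { "hi" }
//
// src/main.rs:
include!("sub/included.rs");

fn main() {
    // `mod bar;` inside included.rs now resolves to src/sub/bar.rs,
    // relative to the included file rather than to the includer.
    assert_eq!(bar::hello(), "hi");
}
```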

View file

@ -146,7 +146,7 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
}
// ok, don't skip this
return false;
false
})
}

View file

@ -83,7 +83,7 @@ pub fn encode(identifier: &str, bytecode: &[u8]) -> Vec<u8> {
encoded.push(0);
}
return encoded;
encoded
}
pub struct DecodedBytecode<'a> {
@ -132,7 +132,7 @@ impl<'a> DecodedBytecode<'a> {
pub fn bytecode(&self) -> Vec<u8> {
let mut data = Vec::new();
DeflateDecoder::new(self.encoded_bytecode).read_to_end(&mut data).unwrap();
return data;
data
}
pub fn identifier(&self) -> &'a str {

View file

@ -96,15 +96,11 @@ impl BackendTypes for CodegenCx<'ll, 'tcx> {
impl CodegenCx<'ll, 'tcx> {
pub fn const_array(&self, ty: &'ll Type, elts: &[&'ll Value]) -> &'ll Value {
unsafe {
return llvm::LLVMConstArray(ty, elts.as_ptr(), elts.len() as c_uint);
}
unsafe { llvm::LLVMConstArray(ty, elts.as_ptr(), elts.len() as c_uint) }
}
pub fn const_vector(&self, elts: &[&'ll Value]) -> &'ll Value {
unsafe {
return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
}
unsafe { llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint) }
}
pub fn const_bytes(&self, bytes: &[u8]) -> &'ll Value {
@ -330,7 +326,7 @@ pub fn val_ty(v: &Value) -> &Type {
pub fn bytes_in_context(llcx: &'ll llvm::Context, bytes: &[u8]) -> &'ll Value {
unsafe {
let ptr = bytes.as_ptr() as *const c_char;
return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True)
}
}

View file

@ -800,7 +800,7 @@ impl CodegenCx<'b, 'tcx> {
ifn!("llvm.dbg.declare", fn(self.type_metadata(), self.type_metadata()) -> void);
ifn!("llvm.dbg.value", fn(self.type_metadata(), t_i64, self.type_metadata()) -> void);
}
return None;
None
}
}

View file

@ -203,7 +203,7 @@ impl TypeMap<'ll, 'tcx> {
let key = self.unique_id_interner.intern(&unique_type_id);
self.type_to_unique_id.insert(type_, UniqueTypeId(key));
return UniqueTypeId(key);
UniqueTypeId(key)
}
/// Gets the `UniqueTypeId` for an enum variant. Enum variants are not really
@ -314,7 +314,7 @@ impl RecursiveTypeDescription<'ll, 'tcx> {
member_holding_stub,
member_descriptions,
);
return MetadataCreationResult::new(metadata_stub, true);
MetadataCreationResult::new(metadata_stub, true)
}
}
}
@ -364,7 +364,7 @@ fn fixed_vec_metadata(
)
};
return MetadataCreationResult::new(metadata, false);
MetadataCreationResult::new(metadata, false)
}
fn vec_slice_metadata(
@ -445,7 +445,7 @@ fn subroutine_type_metadata(
return_if_metadata_created_in_meantime!(cx, unique_type_id);
return MetadataCreationResult::new(
MetadataCreationResult::new(
unsafe {
llvm::LLVMRustDIBuilderCreateSubroutineType(
DIB(cx),
@ -454,7 +454,7 @@ fn subroutine_type_metadata(
)
},
false,
);
)
}
// FIXME(1563): This is all a bit of a hack because 'trait pointer' is an ill-
@ -781,7 +781,7 @@ fn file_metadata_raw(
let key = (file_name, directory);
match debug_context(cx).created_files.borrow_mut().entry(key) {
Entry::Occupied(o) => return o.get(),
Entry::Occupied(o) => o.get(),
Entry::Vacant(v) => {
let (file_name, directory) = v.key();
debug!("file_metadata: file_name: {:?}, directory: {:?}", file_name, directory);
@ -831,7 +831,7 @@ fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
)
};
return ty_metadata;
ty_metadata
}
fn foreign_type_metadata(
@ -1273,11 +1273,11 @@ fn prepare_union_metadata(
fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool {
// On MSVC we have to use the fallback mode, because LLVM doesn't
// lower variant parts to PDB.
return cx.sess().target.target.options.is_like_msvc
cx.sess().target.target.options.is_like_msvc
// LLVM version 7 shipped without an important bug fix,
// but the required patch is in LLVM 8, and Rust's LLVM
// reports 8 as well.
|| llvm_util::get_major_version() < 8;
|| llvm_util::get_major_version() < 8
}
// FIXME(eddyb) maybe precompute this? Right now it's computed once
@ -2075,7 +2075,7 @@ fn prepare_enum_metadata(
}
};
return create_and_register_recursive_type_forward_declaration(
create_and_register_recursive_type_forward_declaration(
cx,
enum_type,
unique_type_id,
@ -2088,7 +2088,7 @@ fn prepare_enum_metadata(
containing_scope,
span,
}),
);
)
}
/// Creates debug information for a composite type, that is, anything that

View file

@ -444,7 +444,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
vec![]
};
return create_DIArray(DIB(cx), &template_params[..]);
create_DIArray(DIB(cx), &template_params[..])
}
fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {

View file

@ -24,9 +24,7 @@ pub fn is_node_local_to_unit(cx: &CodegenCx<'_, '_>, def_id: DefId) -> bool {
#[allow(non_snake_case)]
pub fn create_DIArray(builder: &DIBuilder<'ll>, arr: &[Option<&'ll DIDescriptor>]) -> &'ll DIArray {
return unsafe {
llvm::LLVMRustDIBuilderGetOrCreateArray(builder, arr.as_ptr(), arr.len() as u32)
};
unsafe { llvm::LLVMRustDIBuilderGetOrCreateArray(builder, arr.as_ptr(), arr.len() as u32) }
}
#[inline]

View file

@ -27,13 +27,13 @@ impl ArchiveRO {
/// If this archive is used with a mutable method, then an error will be
/// raised.
pub fn open(dst: &Path) -> Result<ArchiveRO, String> {
return unsafe {
unsafe {
let s = path_to_c_string(dst);
let ar = super::LLVMRustOpenArchive(s.as_ptr()).ok_or_else(|| {
super::last_error().unwrap_or_else(|| "failed to open archive".to_owned())
})?;
Ok(ArchiveRO { raw: ar })
};
}
}
pub fn iter(&self) -> Iter<'_> {

View file

@ -119,7 +119,7 @@ impl Command {
for k in &self.env_remove {
ret.env_remove(k);
}
return ret;
ret
}
// extensions

View file

@ -852,7 +852,7 @@ impl CrateInfo {
info.missing_lang_items.insert(cnum, missing);
}
return info;
info
}
}
@ -887,7 +887,7 @@ pub fn provide_both(providers: &mut Providers<'_>) {
}
}
}
return tcx.sess.opts.optimize;
tcx.sess.opts.optimize
};
providers.dllimport_foreign_items = |tcx, krate| {

View file

@ -125,9 +125,9 @@ impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
} else {
self.node = Some(dom);
}
return Some(node);
Some(node)
} else {
return None;
None
}
}
}

View file

@ -752,7 +752,7 @@ impl RustcDefaultCalls {
PrintRequest::NativeStaticLibs => {}
}
}
return Compilation::Stop;
Compilation::Stop
}
}

View file

@ -1,5 +1,6 @@
The length of the platform-intrinsic function `simd_shuffle`
wasn't specified. Erroneous code example:
The length of the platform-intrinsic function `simd_shuffle` wasn't specified.
Erroneous code example:
```compile_fail,E0439
#![feature(platform_intrinsics)]

View file

@ -778,7 +778,7 @@ fn may_begin_with(token: &Token, name: Name) -> bool {
}
sym::ty => token.can_begin_type(),
sym::ident => get_macro_ident(token).is_some(),
sym::literal => token.can_begin_literal_or_bool(),
sym::literal => token.can_begin_literal_maybe_minus(),
sym::vis => match token.kind {
// The follow-set of :vis + "priv" keyword + interpolated
token::Comma | token::Ident(..) | token::Interpolated(_) => true,
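The switch away from `can_begin_literal_or_bool` matters because a `literal` matcher accepts an optional leading minus, so the lookahead must treat `-` as a possible start of the fragment. A minimal stable-Rust check of the grammar being matched:

```rust
// `$lit:literal` matches `"-"? lit`, which is why the matcher's lookahead
// needs `can_begin_literal_maybe_minus` here.
macro_rules! first_lit {
    ($lit:literal) => {
        $lit
    };
}

fn main() {
    assert_eq!(first_lit!(42), 42);
    assert_eq!(first_lit!(-42), -42); // starts with `-`, still one literal fragment
}
```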

View file

@ -175,6 +175,6 @@ impl AssertModuleSource<'tcx> {
return true;
}
debug!("check_config: no match found");
return false;
false
}
}

View file

@ -132,7 +132,6 @@ where
}
Err(err) => {
sess.err(&format!("failed to write dep-graph to `{}`: {}", path_buf.display(), err));
return;
}
}
}

View file

@ -136,7 +136,7 @@ impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> {
} else {
// Fast path for the common case.
self.relate(a.skip_binder(), b.skip_binder())?;
return Ok(a.clone());
Ok(a.clone())
}
}
}

View file

@ -142,6 +142,6 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
.span_label(span_2, String::new())
.span_label(span, span_label)
.emit();
return Some(ErrorReported);
Some(ErrorReported)
}
}

View file

@ -30,7 +30,7 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
let span = self.trace.cause.span;
return self.infcx.commit_if_ok(|snapshot| {
self.infcx.commit_if_ok(|snapshot| {
// First, we instantiate each bound region in the supertype with a
// fresh placeholder region.
let (b_prime, placeholder_map) = self.infcx.replace_bound_vars_with_placeholders(b);
@ -53,7 +53,7 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
debug!("higher_ranked_sub: OK result={:?}", result);
Ok(ty::Binder::bind(result))
});
})
}
}

View file

@ -452,12 +452,10 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
debug!("Expanding value of {:?} from {:?} to {:?}", b_vid, cur_region, lub);
*b_data = VarValue::Value(lub);
return true;
true
}
VarValue::ErrorValue => {
return false;
}
VarValue::ErrorValue => false,
}
}
@ -804,7 +802,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
}
}
return graph;
graph
}
fn collect_error_for_expanding_node(

View file

@ -877,7 +877,7 @@ where
// If sub-roots are equal, then `for_vid` and
// `vid` are related via subtyping.
debug!("TypeGeneralizer::tys: occurs check failed");
return Err(TypeError::Mismatch);
Err(TypeError::Mismatch)
} else {
match variables.probe(vid) {
TypeVariableValue::Known { value: u } => {
@ -898,7 +898,7 @@ where
let u = self.tcx().mk_ty_var(new_var_id);
debug!("generalize: replacing original vid={:?} with new={:?}", vid, u);
return Ok(u);
Ok(u)
}
}
}

View file

@ -505,7 +505,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
self.undo_log.push(AddVar(vid));
}
debug!("created new region variable {:?} in {:?} with origin {:?}", vid, universe, origin);
return vid;
vid
}
/// Returns the universe for the given variable.

View file

@ -527,10 +527,10 @@ impl Cursor<'_> {
if self.first() == '\'' {
self.bump();
let kind = Char { terminated: true };
return Literal { kind, suffix_start: self.len_consumed() };
Literal { kind, suffix_start: self.len_consumed() }
} else {
Lifetime { starts_with_number }
}
return Lifetime { starts_with_number };
}
fn single_quoted_string(&mut self) -> bool {
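A surface-language view of the two arms in the branch above, for orientation (ordinary code, not lexer internals):

```rust
fn main() {
    let c = 'x';               // closing quote follows: lexed as Literal { kind: Char, .. }
    let s: &'static str = "s"; // no closing quote: `'static` is lexed as a Lifetime
    println!("{} {}", c, s);
}
```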

View file

@ -269,7 +269,7 @@ impl EarlyLintPass for UnsafeCode {
})
}
_ => return,
_ => {}
}
}

View file

@ -273,7 +273,6 @@ fn lint_int_literal<'a, 'tcx>(
cx.sess()
.source_map()
.span_to_snippet(lit.span)
.ok()
.expect("must get snippet from literal"),
t.name_str(),
min,
@ -338,7 +337,6 @@ fn lint_uint_literal<'a, 'tcx>(
cx.sess()
.source_map()
.span_to_snippet(lit.span)
.ok()
.expect("must get snippet from literal"),
t.name_str(),
min,

View file

@ -543,7 +543,7 @@ impl EarlyLintPass for UnusedParens {
// Do not lint on `(..)` as that will result in the other arms being useless.
Paren(_)
// The other cases do not contain sub-patterns.
| Wild | Rest | Lit(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) => return,
| Wild | Rest | Lit(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) => {},
// These are list-like patterns; parens can always be removed.
TupleStruct(_, ps) | Tuple(ps) | Slice(ps) | Or(ps) => for p in ps {
self.check_unused_parens_pat(cx, p, false, false);

View file

@ -264,7 +264,7 @@ impl<'a> CrateLoader<'a> {
ret = Some(cnum);
}
});
return ret;
ret
}
fn verify_no_symbol_conflicts(&self, span: Span, root: &CrateRoot<'_>) {

View file

@ -6,7 +6,7 @@ use rustc_hir::itemlikevisit::ItemLikeVisitor;
crate fn collect(tcx: TyCtxt<'_>) -> Vec<ForeignModule> {
let mut collector = Collector { tcx, modules: Vec::new() };
tcx.hir().krate().visit_all_item_likes(&mut collector);
return collector.modules;
collector.modules
}
struct Collector<'tcx> {

View file

@ -16,7 +16,7 @@ crate fn collect(tcx: TyCtxt<'_>) -> Vec<String> {
}
}
return collector.args;
collector.args
}
struct Collector {

View file

@ -949,7 +949,7 @@ fn get_metadata_section(
let start = Instant::now();
let ret = get_metadata_section_imp(target, flavor, filename, loader);
info!("reading {:?} => {:?}", filename.file_name().unwrap(), start.elapsed());
return ret;
ret
}
/// A trivial wrapper for `Mmap` that implements `StableDeref`.

View file

@ -15,7 +15,7 @@ crate fn collect(tcx: TyCtxt<'_>) -> Vec<NativeLibrary> {
let mut collector = Collector { tcx, libs: Vec::new() };
tcx.hir().krate().visit_all_item_likes(&mut collector);
collector.process_command_line();
return collector.libs;
collector.libs
}
crate fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool {

View file

@ -170,7 +170,7 @@ provide! { <'tcx> tcx, def_id, other, cdata,
.iter()
.filter_map(|&(exported_symbol, export_level)| {
if let ExportedSymbol::NonGeneric(def_id) = exported_symbol {
return Some((def_id, export_level))
Some((def_id, export_level))
} else {
None
}

View file

@ -273,7 +273,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
assert_eq!(borrow_data.borrowed_place, *place);
}
return self.super_rvalue(rvalue, location);
self.super_rvalue(rvalue, location)
}
}

View file

@ -500,7 +500,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
}
}
return None;
None
}
/// We've found an enum/struct/union type with the substitutions

View file

@ -64,13 +64,16 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
};
debug!(
"equate_inputs_and_outputs: normalized_input_tys = {:?}, local_decls = {:?}",
normalized_input_tys, body.local_decls
);
// Equate expected input tys with those in the MIR.
for (&normalized_input_ty, argument_index) in normalized_input_tys.iter().zip(0..) {
// In MIR, argument N is stored in local N+1.
let local = Local::new(argument_index + 1);
debug!("equate_inputs_and_outputs: normalized_input_ty = {:?}", normalized_input_ty);
let mir_input_ty = body.local_decls[local].ty;
let mir_input_span = body.local_decls[local].source_info.span;
self.equate_normalized_input_or_output(
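For readers unfamiliar with the numbering that `argument_index + 1` relies on: MIR reserves local `_0` for the return place, so argument N lives in local N+1 (a notational sketch, not runnable code):

```rust
// MIR locals for `fn f(a: i32, b: bool) -> i32`:
//   _0: i32   // the return place
//   _1: i32   // `a`: argument 0 is stored in local 0 + 1
//   _2: bool  // `b`: argument 1 is stored in local 1 + 1
```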

View file

@ -56,7 +56,7 @@ impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
self.return_to_block(ret.map(|r| r.1))?;
self.dump_place(*dest);
return Ok(true);
Ok(true)
}
/// "Intercept" a function call to a panic-related function

View file

@ -122,7 +122,7 @@ pub(crate) fn has_rustc_mir_with(attrs: &[ast::Attribute], name: Symbol) -> Opti
}
}
}
return None;
None
}
pub struct MoveDataParamEnv<'tcx> {
@ -171,7 +171,7 @@ where
return None;
}
}
return None;
None
};
let print_preflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_preflow);

View file

@ -39,7 +39,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let loc_ty = self
.tcx
.type_of(self.tcx.require_lang_item(PanicLocationLangItem, None))
.subst(*self.tcx, self.tcx.mk_substs([self.tcx.lifetimes.re_static.into()].iter()));
.subst(*self.tcx, self.tcx.mk_substs([self.tcx.lifetimes.re_erased.into()].iter()));
let loc_layout = self.layout_of(loc_ty).unwrap();
let location = self.allocate(loc_layout, MemoryKind::CallerLocation);

View file

@ -604,7 +604,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.not_undef()
.and_then(|raw_discr| self.force_bits(raw_discr, discr_val.layout.size))
.map_err(|_| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?;
let real_discr = if discr_val.layout.ty.is_signed() {
let real_discr = if discr_val.layout.abi.is_signed() {
// going from layout tag type to typeck discriminant type
// requires first sign extending with the discriminant layout
let sexted = sign_extend(bits_discr, discr_val.layout.size) as i128;
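The sign extension here is what makes negative explicit discriminants decode correctly. The same idea as a standalone stable-Rust illustration (not the interpreter's own helper):

```rust
// Widening raw tag bits to a signed discriminant value: reinterpret the low
// bits as signed first, then extend to the wider type.
fn sign_extend_u8(bits: u8) -> i128 {
    bits as i8 as i128
}

fn main() {
    assert_eq!(sign_extend_u8(0xFF), -1); // a tag of -1, not 255
    assert_eq!(sign_extend_u8(0x7F), 127);
}
```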

View file

@ -64,7 +64,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ge => l >= r,
_ => bug!("Invalid operation on char: {:?}", bin_op),
};
return (Scalar::from_bool(res), false, self.tcx.types.bool);
(Scalar::from_bool(res), false, self.tcx.types.bool)
}
fn binary_bool_op(
@ -87,7 +87,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
BitXor => l ^ r,
_ => bug!("Invalid operation on bool: {:?}", bin_op),
};
return (Scalar::from_bool(res), false, self.tcx.types.bool);
(Scalar::from_bool(res), false, self.tcx.types.bool)
}
fn binary_float_op<F: Float + Into<Scalar<M::PointerTag>>>(
@ -113,7 +113,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Rem => ((l % r).value.into(), ty),
_ => bug!("invalid float op: `{:?}`", bin_op),
};
return (val, false, ty);
(val, false, ty)
}
fn binary_int_op(

View file

@ -212,9 +212,7 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
if self.layout.is_unsized() {
// We need to consult `meta` metadata
match self.layout.ty.kind {
ty::Slice(..) | ty::Str => {
return self.mplace.meta.unwrap_meta().to_machine_usize(cx);
}
ty::Slice(..) | ty::Str => self.mplace.meta.unwrap_meta().to_machine_usize(cx),
_ => bug!("len not supported on unsized type {:?}", self.layout.ty),
}
} else {
@ -920,6 +918,10 @@ where
// most likely we *are* running `typeck` right now. Investigate whether we can bail out
// on `typeck_tables().has_errors` at all const eval entry points.
debug!("Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", src, dest);
self.tcx.sess.delay_span_bug(
self.tcx.span,
"size-changing transmute, should have been caught by transmute checking",
);
throw_inval!(TransmuteSizeDiff(src.layout.ty, dest.layout.ty));
}
// Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want
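The delayed bug is a backstop: a size-changing transmute should already have been rejected during type checking, as in this illustrative snippet (which fails to compile by design):

```rust
// Rejected at type-check time with E0512, long before const eval runs:
fn widen(x: u32) -> u64 {
    unsafe { std::mem::transmute(x) }
    //~^ ERROR cannot transmute between types of different sizes
}
```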

View file

@ -240,7 +240,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
match instance.def {
ty::InstanceDef::Intrinsic(..) => {
assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
return M::call_intrinsic(self, span, instance, args, ret, unwind);
M::call_intrinsic(self, span, instance, args, ret, unwind)
}
ty::InstanceDef::VtableShim(..)
| ty::InstanceDef::ReifyShim(..)

View file

@ -751,7 +751,7 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx
bug!("cannot create local mono-item for {:?}", def_id)
}
return true;
true
}
/// For a given pair of source and target type that occur in an unsizing coercion,

View file

@ -32,6 +32,7 @@ impl<'tcx> MirPass<'tcx> for CleanupNonCodegenStatements {
fn run_pass(&self, tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, body: &mut BodyAndCache<'tcx>) {
let mut delete = DeleteNonCodegenStatements { tcx };
delete.visit_body(body);
body.user_type_annotations.raw.clear();
}
}

View file

@ -483,7 +483,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
err.span_label(source_info.span, format!("{:?}", panic));
err.emit()
});
return None;
None
}
fn check_unary_op(

View file

@ -1,63 +0,0 @@
//! This pass erases all early-bound regions from the types occurring in the MIR.
//! We want to do this once just before codegen, so codegen does not have to take
//! care erasing regions all over the place.
//! N.B., we do _not_ erase regions of statements that are relevant for
//! "types-as-contracts"-validation, namely, `AcquireValid` and `ReleaseValid`.
use crate::transform::{MirPass, MirSource};
use rustc::mir::visit::{MutVisitor, TyContext};
use rustc::mir::*;
use rustc::ty::subst::SubstsRef;
use rustc::ty::{self, Ty, TyCtxt};
struct EraseRegionsVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
}
impl EraseRegionsVisitor<'tcx> {
pub fn new(tcx: TyCtxt<'tcx>) -> Self {
EraseRegionsVisitor { tcx }
}
}
impl MutVisitor<'tcx> for EraseRegionsVisitor<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
*ty = self.tcx.erase_regions(ty);
}
fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) {
*region = self.tcx.lifetimes.re_erased;
}
fn visit_const(&mut self, constant: &mut &'tcx ty::Const<'tcx>, _: Location) {
*constant = self.tcx.erase_regions(constant);
}
fn visit_substs(&mut self, substs: &mut SubstsRef<'tcx>, _: Location) {
*substs = self.tcx.erase_regions(substs);
}
fn process_projection_elem(&mut self, elem: &PlaceElem<'tcx>) -> Option<PlaceElem<'tcx>> {
if let PlaceElem::Field(field, ty) = elem {
let new_ty = self.tcx.erase_regions(ty);
if new_ty != *ty {
return Some(PlaceElem::Field(*field, new_ty));
}
}
None
}
}
pub struct EraseRegions;
impl<'tcx> MirPass<'tcx> for EraseRegions {
fn run_pass(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut BodyAndCache<'tcx>) {
EraseRegionsVisitor::new(tcx).visit_body(body);
}
}

View file

@ -357,18 +357,11 @@ impl MutVisitor<'tcx> for TransformVisitor<'tcx> {
}
}
fn make_generator_state_argument_indirect<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: DefId,
body: &mut BodyAndCache<'tcx>,
) {
fn make_generator_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut BodyAndCache<'tcx>) {
let gen_ty = body.local_decls.raw[1].ty;
let region = ty::ReFree(ty::FreeRegion { scope: def_id, bound_region: ty::BoundRegion::BrEnv });
let region = tcx.mk_region(region);
let ref_gen_ty = tcx.mk_ref(region, ty::TypeAndMut { ty: gen_ty, mutbl: hir::Mutability::Mut });
let ref_gen_ty =
tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty: gen_ty, mutbl: Mutability::Mut });
// Replace the by value generator argument
body.local_decls.raw[1].ty = ref_gen_ty;
@ -874,7 +867,6 @@ fn elaborate_generator_drops<'tcx>(
fn create_generator_drop_shim<'tcx>(
tcx: TyCtxt<'tcx>,
transform: &TransformVisitor<'tcx>,
def_id: DefId,
source: MirSource<'tcx>,
gen_ty: Ty<'tcx>,
body: &mut BodyAndCache<'tcx>,
@ -912,7 +904,7 @@ fn create_generator_drop_shim<'tcx>(
local_info: LocalInfo::Other,
};
make_generator_state_argument_indirect(tcx, def_id, &mut body);
make_generator_state_argument_indirect(tcx, &mut body);
// Change the generator argument from &mut to *mut
body.local_decls[SELF_ARG] = LocalDecl {
@ -1047,7 +1039,6 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
fn create_generator_resume_function<'tcx>(
tcx: TyCtxt<'tcx>,
transform: TransformVisitor<'tcx>,
def_id: DefId,
source: MirSource<'tcx>,
body: &mut BodyAndCache<'tcx>,
can_return: bool,
@ -1112,7 +1103,7 @@ fn create_generator_resume_function<'tcx>(
insert_switch(body, cases, &transform, TerminatorKind::Unreachable);
make_generator_state_argument_indirect(tcx, def_id, body);
make_generator_state_argument_indirect(tcx, body);
make_generator_state_argument_pinned(tcx, body);
no_landing_pads(tcx, body);
@ -1332,11 +1323,11 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
// Create a copy of our MIR and use it to create the drop shim for the generator
let drop_shim =
create_generator_drop_shim(tcx, &transform, def_id, source, gen_ty, body, drop_clean);
create_generator_drop_shim(tcx, &transform, source, gen_ty, body, drop_clean);
body.generator_drop = Some(box drop_shim);
// Create the Generator::resume function
create_generator_resume_function(tcx, transform, def_id, source, body, can_return);
create_generator_resume_function(tcx, transform, source, body, can_return);
}
}

View file

@ -22,7 +22,6 @@ pub mod copy_prop;
pub mod deaggregator;
pub mod dump_mir;
pub mod elaborate_drops;
pub mod erase_regions;
pub mod generator;
pub mod inline;
pub mod instcombine;
@ -296,8 +295,6 @@ fn run_optimization_passes<'tcx>(
&simplify::SimplifyCfg::new("elaborate-drops"),
// No lifetime analysis based on borrowing can be done from here on out.
// From here on out, regions are gone.
&erase_regions::EraseRegions,
// Optimizations begin.
&unreachable_prop::UnreachablePropagation,
&uninhabited_enum_branching::UninhabitedEnumBranching,
@ -341,6 +338,9 @@ fn optimized_mir(tcx: TyCtxt<'_>, def_id: DefId) -> &BodyAndCache<'_> {
let mut body = body.steal();
run_optimization_passes(tcx, &mut body, def_id, None);
body.ensure_predecessors();
debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");
tcx.arena.alloc(body)
}
@ -358,5 +358,7 @@ fn promoted_mir(tcx: TyCtxt<'_>, def_id: DefId) -> &IndexVec<Promoted, BodyAndCa
body.ensure_predecessors();
}
debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");
tcx.intern_promoted(promoted)
}

View file

@ -913,7 +913,13 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
ty,
val: ty::ConstKind::Unevaluated(
def_id,
InternalSubsts::identity_for_item(tcx, def_id),
InternalSubsts::for_item(tcx, def_id, |param, _| {
if let ty::GenericParamDefKind::Lifetime = param.kind {
tcx.lifetimes.re_erased.into()
} else {
tcx.mk_param_from_def(param)
}
}),
Some(promoted_id),
),
}),

View file

@ -6,7 +6,7 @@ use rustc::middle::lang_items;
use rustc::middle::region;
use rustc::mir::*;
use rustc::ty::subst::Subst;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc_attr::{self as attr, UnwindAttr};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
@ -43,8 +43,7 @@ fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> BodyAndCache<'_> {
..
})
| Node::TraitItem(hir::TraitItem {
kind:
hir::TraitItemKind::Fn(hir::FnSig { decl, .. }, hir::TraitFn::Provided(body_id)),
kind: hir::TraitItemKind::Fn(hir::FnSig { decl, .. }, hir::TraitFn::Provided(body_id)),
..
}) => (*body_id, decl.output.span()),
Node::Item(hir::Item { kind: hir::ItemKind::Static(ty, _, body_id), .. })
@ -128,12 +127,8 @@ fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> BodyAndCache<'_> {
let ty = if fn_sig.c_variadic && index == fn_sig.inputs().len() {
let va_list_did =
tcx.require_lang_item(lang_items::VaListTypeLangItem, Some(arg.span));
let region = tcx.mk_region(ty::ReScope(region::Scope {
id: body.value.hir_id.local_id,
data: region::ScopeData::CallSite,
}));
tcx.type_of(va_list_did).subst(tcx, &[region.into()])
tcx.type_of(va_list_did).subst(tcx, &[tcx.lifetimes.re_erased.into()])
} else {
fn_sig.inputs()[index]
};
@ -189,6 +184,20 @@ fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> BodyAndCache<'_> {
let mut body = BodyAndCache::new(body);
body.ensure_predecessors();
// The borrow checker will replace all the regions here with its own
// inference variables. There's no point having non-erased regions here.
// The exception is `body.user_type_annotations`, which is used unmodified
// by borrow checking.
debug_assert!(
!(body.local_decls.has_free_regions()
|| body.basic_blocks().has_free_regions()
|| body.var_debug_info.has_free_regions()
|| body.yield_ty.has_free_regions()),
"Unexpected free regions in MIR: {:?}",
body,
);
body
})
}
@ -209,7 +218,7 @@ fn liberated_closure_env_ty(
};
let closure_env_ty = tcx.closure_env_ty(closure_def_id, closure_substs).unwrap();
tcx.liberate_late_bound_regions(closure_def_id, &closure_env_ty)
tcx.erase_late_bound_regions(&closure_env_ty)
}
#[derive(Debug, PartialEq, Eq)]
@ -368,7 +377,7 @@ impl BlockContext {
}
}
return None;
None
}
/// Looks at the topmost frame on the BlockContext and reports

View file

@ -98,7 +98,7 @@ fn mirror_stmts<'a, 'tcx>(
}
}
}
return result;
result
}
crate fn to_expr_ref<'a, 'tcx>(

View file

@ -320,7 +320,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
going with stringified version"
);
}
return tokens_for_real;
tokens_for_real
}
fn prepend_attrs(

View file

@ -996,7 +996,7 @@ impl<'a> Parser<'a> {
let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal), attrs);
self.maybe_recover_from_bad_qpath(expr, true)
}
None => return Err(self.expected_expression_found()),
None => Err(self.expected_expression_found()),
}
}
@ -1374,6 +1374,7 @@ impl<'a> Parser<'a> {
}
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
/// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
maybe_whole_expr!(self);
@ -1713,7 +1714,7 @@ impl<'a> Parser<'a> {
}
let hi = self.token.span;
self.bump();
return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs));
Ok(self.mk_expr(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs))
}
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {

View file

@ -314,7 +314,7 @@ impl<'a> Parser<'a> {
" struct ".into(),
Applicability::MaybeIncorrect, // speculative
);
return Err(err);
Err(err)
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
let ident = self.parse_ident().unwrap();
self.bump(); // `(`
@ -362,7 +362,7 @@ impl<'a> Parser<'a> {
);
}
}
return Err(err);
Err(err)
} else if self.look_ahead(1, |t| *t == token::Lt) {
let ident = self.parse_ident().unwrap();
self.eat_to_tokens(&[&token::Gt]);
@ -384,7 +384,7 @@ impl<'a> Parser<'a> {
Applicability::MachineApplicable,
);
}
return Err(err);
Err(err)
} else {
Ok(())
}
@ -910,7 +910,7 @@ impl<'a> Parser<'a> {
let span = self.sess.source_map().def_span(span);
let msg = format!("{} is not supported in {}", kind.descr(), ctx);
self.struct_span_err(span, &msg).emit();
return None;
None
}
fn error_on_foreign_const(&self, span: Span, ident: Ident) {
@ -1509,7 +1509,7 @@ impl<'a> Parser<'a> {
})
// `extern ABI fn`
|| self.check_keyword(kw::Extern)
&& self.look_ahead(1, |t| t.can_begin_literal_or_bool())
&& self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
&& self.look_ahead(2, |t| t.is_keyword(kw::Fn))
}
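The literal this lookahead expects is an ABI string: together the three tokens identify headers like the one below (ordinary stable Rust, shown for orientation):

```rust
// `extern` (keyword) + `"C"` (a literal) + `fn` is what the lookahead spots.
extern "C" fn add_one(x: i32) -> i32 {
    x + 1
}

fn main() {
    let f: extern "C" fn(i32) -> i32 = add_one;
    assert_eq!(f(41), 42);
}
```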

View file

@ -696,7 +696,7 @@ impl<'a> Parser<'a> {
self.look_ahead(dist, |t| {
t.is_path_start() // e.g. `MY_CONST`;
|| t.kind == token::Dot // e.g. `.5` for recovery;
|| t.can_begin_literal_or_bool() // e.g. `42`.
|| t.can_begin_literal_maybe_minus() // e.g. `42`.
|| t.is_whole_expr()
})
}
@ -918,7 +918,7 @@ impl<'a> Parser<'a> {
}
err.emit();
}
return Ok((fields, etc));
Ok((fields, etc))
}
/// Recover on `...` as if it were `..` to avoid further errors.

View file

@ -278,7 +278,7 @@ impl<'a> Parser<'a> {
_ => {}
}
e.span_label(sp, "expected `{`");
return Err(e);
Err(e)
}
/// Parses a block. Inner attributes are allowed.

View file

@ -864,7 +864,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
first_merge,
any_changed
);
return any_changed;
any_changed
}
// Indicates that a local variable was *defined*; we know that no

View file

@ -222,7 +222,7 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
return true;
}
}
return false;
false
}
fn emit_unlabled_cf_in_while_condition(&mut self, span: Span, cf_type: &str) {
struct_span_err!(

View file

@ -30,9 +30,7 @@ fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item<'_>, attrs: Codegen
}
match item.kind {
hir::ItemKind::Fn(ref sig, ..) if sig.header.is_const() => {
return true;
}
hir::ItemKind::Fn(ref sig, ..) if sig.header.is_const() => true,
hir::ItemKind::Impl { .. } | hir::ItemKind::Fn(..) => {
let generics = tcx.generics_of(tcx.hir().local_def_id(item.hir_id));
generics.requires_monomorphization(tcx)

View file

@ -465,7 +465,7 @@ fn new_index(tcx: TyCtxt<'tcx>) -> Index<'tcx> {
|v| intravisit::walk_crate(v, krate),
);
}
return index;
index
}
/// Cross-references the feature names of unstable APIs with enabled

View file

@ -1423,7 +1423,7 @@ impl<'a, 'tcx> ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
Some(_) | None => false,
}
} else {
return false;
false
}
}
@ -1837,7 +1837,7 @@ impl SearchInterfaceForPrivateItemsVisitor<'tcx> {
&& self.tcx.is_private_dep(item_id.krate);
log::debug!("leaks_private_dep(item_id={:?})={}", item_id, ret);
return ret;
ret
}
}

View file

@ -200,7 +200,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
fn visit_pat(&mut self, pat: &'a Pat) {
match pat.kind {
PatKind::MacCall(..) => return self.visit_macro_invoc(pat.id),
PatKind::MacCall(..) => self.visit_macro_invoc(pat.id),
_ => visit::walk_pat(self, pat),
}
}

View file

@ -1108,7 +1108,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
match binding.kind {
// Never suggest the name that has binding error
// i.e., the name that cannot be previously resolved
NameBindingKind::Res(Res::Err, _) => return None,
NameBindingKind::Res(Res::Err, _) => None,
_ => Some(&i.name),
}
}

View file

@ -380,7 +380,7 @@ impl<'a> LateResolutionVisitor<'a, '_, '_> {
_ => (),
}
};
return has_self_arg;
has_self_arg
}
fn followed_by_brace(&self, span: Span) -> (bool, Option<(Span, String)>) {
@ -430,7 +430,7 @@ impl<'a> LateResolutionVisitor<'a, '_, '_> {
break;
}
}
return (followed_by_brace, closing_brace);
(followed_by_brace, closing_brace)
}
/// Provides context-dependent help for errors reported by the `smart_resolve_path_fragment`

View file

@ -133,7 +133,7 @@ impl RegionExt for Region {
/// that it corresponds to.
///
/// FIXME. This struct gets converted to a `ResolveLifetimes` for
/// actual use. It has the same data, but indexed by `DefIndex`. This
/// actual use. It has the same data, but indexed by `LocalDefId`. This
/// is silly.
#[derive(Default)]
struct NamedRegionMap {

View file

@ -83,7 +83,7 @@ fn sub_namespace_match(candidate: Option<MacroKind>, requirement: Option<MacroKi
// line-breaks and is slow.
fn fast_print_path(path: &ast::Path) -> Symbol {
if path.segments.len() == 1 {
return path.segments[0].ident.name;
path.segments[0].ident.name
} else {
let mut path_str = String::with_capacity(64);
for (i, segment) in path.segments.iter().enumerate() {

View file

@ -534,7 +534,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
let variant = &def.non_enum_variant();
filter!(self.span_utils, ident.span);
let span = self.span_from_span(ident.span);
return Some(Data::RefData(Ref {
Some(Data::RefData(Ref {
kind: RefKind::Variable,
span,
ref_id: self
@ -542,7 +542,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
.find_field_index(ident, variant)
.map(|index| id_from_def_id(variant.fields[index].did))
.unwrap_or_else(|| null_id()),
}));
}))
}
ty::Tuple(..) => None,
_ => {

View file

@ -1140,7 +1140,7 @@ pub fn parse_error_format(
_ => {}
}
return error_format;
error_format
}
fn parse_crate_edition(matches: &getopts::Matches) -> Edition {

View file

@ -99,10 +99,6 @@ impl<'sm> CachingSourceMapView<'sm> {
cache_entry.line_end = line_bounds.1;
cache_entry.time_stamp = self.time_stamp;
return Some((
cache_entry.file.clone(),
cache_entry.line_number,
pos - cache_entry.line_start,
));
Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line_start))
}
}

Some files were not shown because too many files have changed in this diff.