
Auto merge of #41773 - frewsxcv:rollup, r=frewsxcv

Rollup of 9 pull requests

- Successful merges: #41064, #41307, #41512, #41582, #41678, #41722, #41734, #41761, #41763
- Failed merges:
bors 2017-05-05 23:20:32 +00:00
commit 42a4f373c9
75 changed files with 1495 additions and 955 deletions

@ -1 +1 @@
Subproject commit 616b98444ff4eb5260deee95ee3e090dfd98b947
Subproject commit 6fa139b1630a9bb95dcd60cfc90aff9c19e54580

View file

@ -277,8 +277,7 @@ impl<T> Arc<T> {
atomic::fence(Acquire);
unsafe {
let ptr = *this.ptr;
let elem = ptr::read(&(*ptr).data);
let elem = ptr::read(&this.ptr.as_ref().data);
// Make a weak pointer to clean up the implicit strong-weak reference
let _weak = Weak { ptr: this.ptr };
@ -306,7 +305,7 @@ impl<T> Arc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
let ptr = unsafe { &(**this.ptr).data as *const _ };
let ptr: *const T = &*this;
mem::forget(this);
ptr
}
@ -345,7 +344,7 @@ impl<T> Arc<T> {
// `data` field from the pointer.
let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
Arc {
ptr: Shared::new(ptr as *const _),
ptr: Shared::new(ptr as *mut u8 as *mut _),
}
}
}
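The two hunks above are the halves of a round-trip: `into_raw` hands out a pointer to the `data` field, and `from_raw` recovers the start of the allocation by subtracting that field's offset. A minimal sketch of the same idea, assuming a simplified stand-in for `ArcInner<T>` (the real one holds atomic counters) and using the stable `mem::offset_of!` in place of the internal `offset_of!` macro in the diff:

```rust
use std::mem;
use std::ptr;

// Simplified stand-in for ArcInner<T>.
#[repr(C)]
struct Inner<T> {
    strong: usize,
    weak: usize,
    data: T,
}

fn main() {
    let inner: *mut Inner<u32> =
        Box::into_raw(Box::new(Inner { strong: 1, weak: 1, data: 42 }));
    // into_raw: expose only the `data` field.
    let raw: *const u32 = unsafe { ptr::addr_of!((*inner).data) };
    // from_raw: subtract the field offset to get back to the allocation,
    // mirroring `(ptr as *const u8).offset(-offset_of!(ArcInner<T>, data))`.
    let offset = mem::offset_of!(Inner<u32>, data);
    let recovered = unsafe { (raw as *const u8).sub(offset) } as *mut Inner<u32>;
    assert_eq!(recovered, inner);
    drop(unsafe { Box::from_raw(recovered) });
}
```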
@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
// `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to these
// contents.
unsafe { &**self.ptr }
unsafe { self.ptr.as_ref() }
}
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
let ptr = self.ptr.as_mut_ptr();
let ptr = self.ptr.as_ptr();
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
ptr::drop_in_place(&mut (*ptr).data);
ptr::drop_in_place(&mut self.ptr.as_mut().data);
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
/// assert!(!Arc::ptr_eq(&five, &other_five));
/// ```
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
let this_ptr: *const ArcInner<T> = *this.ptr;
let other_ptr: *const ArcInner<T> = *other.ptr;
this_ptr == other_ptr
this.ptr.as_ptr() == other.ptr.as_ptr()
}
}
@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
// here (due to zeroing) because data is no longer accessed by
// other threads (due to there being no more strong refs at this
// point).
let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
mem::swap(this, &mut swap);
mem::forget(swap);
}
@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
let inner = &mut *this.ptr.as_mut_ptr();
&mut inner.data
&mut this.ptr.as_mut().data
}
}
}
@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
let inner = &mut *this.ptr.as_mut_ptr();
Some(&mut inner.data)
Some(&mut this.ptr.as_mut().data)
}
} else {
None
@ -878,7 +873,7 @@ impl<T: ?Sized> Weak<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &**self.ptr }
unsafe { self.ptr.as_ref() }
}
}
@ -962,7 +957,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// assert!(other_weak_foo.upgrade().is_none());
/// ```
fn drop(&mut self) {
let ptr = *self.ptr;
let ptr = self.ptr.as_ptr();
// If we find out that we were the last weak pointer, then it's time to
// deallocate the data entirely. See the discussion in Arc::drop() about
@ -1143,7 +1138,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self.ptr, f)
fmt::Pointer::fmt(&self.ptr, f)
}
}

View file

@ -156,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
let align = mem::align_of::<T>();
let p = if size == 0 {
heap::EMPTY as *mut u8
mem::align_of::<T>() as *mut u8
} else {
let p = unsafe { heap::allocate(size, align) };
if p.is_null() {

View file

@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
///
/// This preserves the non-null invariant for types like `Box<T>`. The address
/// may overlap with non-zero-size memory allocations.
pub const EMPTY: *mut () = 0x1 as *mut ();
#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
#[unstable(feature = "heap_api", issue = "27700")]
pub const EMPTY: *mut () = 1 as *mut ();
/// The allocator for unique pointers.
// This function must not unwind. If it does, MIR trans will fail.
@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
if size == 0 {
EMPTY as *mut u8
align as *mut u8
} else {
let ptr = allocate(size, align);
if ptr.is_null() {
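`align as *mut u8` here, and `mem::align_of::<T>() as *mut u8` in the previous hunk, rely on the same observation: a zero-size request needs no memory, only a non-null, well-aligned address, and the alignment value itself is such an address. A small sketch of why this is sound for zero-sized types:

```rust
use std::{mem, ptr};

// A type's alignment, used as an address, is non-null and well-aligned.
fn dangling<T>() -> *mut T {
    mem::align_of::<T>() as *mut T
}

#[derive(Debug, PartialEq)]
struct Zst; // zero-sized type

fn main() {
    let p = dangling::<Zst>();
    assert!(!p.is_null());
    // Reads and writes of a zero-sized value touch no actual memory,
    // so a dangling (but aligned, non-null) pointer suffices.
    unsafe {
        ptr::write(p, Zst);
        assert_eq!(ptr::read(p), Zst);
    }
}
```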

View file

@ -22,13 +22,13 @@ use core::cmp;
/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
/// In particular:
///
/// * Produces heap::EMPTY on zero-sized types
/// * Produces heap::EMPTY on zero-length allocations
/// * Produces Unique::empty() on zero-sized types
/// * Produces Unique::empty() on zero-length allocations
/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
/// * Guards against 32-bit systems allocating more than isize::MAX bytes
/// * Guards against overflowing your length
/// * Aborts on OOM
/// * Avoids freeing heap::EMPTY
/// * Avoids freeing Unique::empty()
/// * Contains a ptr::Unique and thus endows the user with all related benefits
///
/// This type does not in any way inspect the memory that it manages. When dropped it *will*
@ -55,15 +55,13 @@ impl<T> RawVec<T> {
/// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
/// delayed allocation.
pub fn new() -> Self {
unsafe {
// !0 is usize::MAX. This branch should be stripped at compile time.
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
// !0 is usize::MAX. This branch should be stripped at compile time.
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
RawVec {
ptr: Unique::new(heap::EMPTY as *mut T),
cap: cap,
}
// Unique::empty() doubles as "unallocated" and "zero-sized allocation"
RawVec {
ptr: Unique::empty(),
cap: cap,
}
}
@ -101,7 +99,7 @@ impl<T> RawVec<T> {
// handles ZSTs and `cap = 0` alike
let ptr = if alloc_size == 0 {
heap::EMPTY as *mut u8
mem::align_of::<T>() as *mut u8
} else {
let align = mem::align_of::<T>();
let ptr = if zeroed {
@ -148,10 +146,10 @@ impl<T> RawVec<T> {
impl<T> RawVec<T> {
/// Gets a raw pointer to the start of the allocation. Note that this is
/// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
/// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
/// be careful.
pub fn ptr(&self) -> *mut T {
*self.ptr
self.ptr.as_ptr()
}
/// Gets the capacity of the allocation.
@ -563,7 +561,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {
let num_bytes = elem_size * self.cap;
unsafe {
heap::deallocate(*self.ptr as *mut _, num_bytes, align);
heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
}
}
}
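One detail of the `RawVec::new` hunk worth spelling out: for a zero-sized `T` the capacity starts at `!0` (`usize::MAX`) because no allocation will ever be needed, while for sized types it starts at zero. A standalone illustration of that branch (plain functions, not RawVec itself):

```rust
use std::mem;

// The capacity rule from RawVec::new above.
fn initial_cap<T>() -> usize {
    if mem::size_of::<T>() == 0 { !0 } else { 0 }
}

fn main() {
    assert_eq!(initial_cap::<()>(), usize::MAX);
    assert_eq!(initial_cap::<u64>(), 0);
    // Vec, which is built on RawVec, still exposes this behaviour:
    assert_eq!(Vec::<()>::new().capacity(), usize::MAX);
}
```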

View file

@ -230,7 +230,7 @@ use core::cell::Cell;
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::{abort, assume};
use core::intrinsics::abort;
use core::marker;
use core::marker::Unsize;
use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@ -358,7 +358,7 @@ impl<T> Rc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
let ptr: *const T = &*this;
mem::forget(this);
ptr
}
@ -395,7 +395,11 @@ impl<T> Rc<T> {
pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
// `value` field from the pointer.
Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
Rc {
ptr: Shared::new(ptr as *mut u8 as *mut _)
}
}
}
@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
// Free the original allocation without freeing its (moved) contents.
box_free(Box::into_raw(value));
Rc { ptr: Shared::new(ptr as *const _) }
Rc { ptr: Shared::new(ptr as *mut _) }
}
}
}
@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
Some(&mut inner.value)
unsafe {
Some(&mut this.ptr.as_mut().value)
}
} else {
None
}
@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
/// assert!(!Rc::ptr_eq(&five, &other_five));
/// ```
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
let this_ptr: *const RcBox<T> = *this.ptr;
let other_ptr: *const RcBox<T> = *other.ptr;
this_ptr == other_ptr
this.ptr.as_ptr() == other.ptr.as_ptr()
}
}
@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
} else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks
unsafe {
let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
mem::swap(this, &mut swap);
swap.dec_strong();
// Remove implicit strong-weak ref (no need to craft a fake
@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
&mut inner.value
unsafe {
&mut this.ptr.as_mut().value
}
}
}
@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
let ptr = self.ptr.as_mut_ptr();
let ptr = self.ptr.as_ptr();
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
ptr::drop_in_place(&mut (*ptr).value);
ptr::drop_in_place(self.ptr.as_mut());
// remove the implicit "strong weak" pointer now that we've
// destroyed the contents.
@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self.ptr, f)
fmt::Pointer::fmt(&self.ptr, f)
}
}
@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
unsafe {
let ptr = *self.ptr;
let ptr = self.ptr.as_ptr();
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
// Safe to assume this here, as if it weren't true, we'd be breaking
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
&(**self.ptr)
self.ptr.as_ref()
}
}
}
@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
// Safe to assume this here, as if it weren't true, we'd be breaking
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
&(**self.ptr)
self.ptr.as_ref()
}
}
}

View file

@ -31,7 +31,6 @@
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(heap_api)]
#![feature(generic_param_attrs)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]
@ -48,7 +47,6 @@ use std::mem;
use std::ptr;
use std::slice;
use alloc::heap;
use alloc::raw_vec::RawVec;
/// An arena that can hold objects of only one type.
@ -140,7 +138,7 @@ impl<T> TypedArena<T> {
unsafe {
if mem::size_of::<T>() == 0 {
self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
let ptr = heap::EMPTY as *mut T;
let ptr = mem::align_of::<T>() as *mut T;
// Don't drop the object. This `write` is equivalent to `forget`.
ptr::write(ptr, object);
&mut *ptr

View file

@ -152,12 +152,12 @@ impl<K, V> BoxedNode<K, V> {
}
unsafe fn from_ptr(ptr: NonZero<*const LeafNode<K, V>>) -> Self {
BoxedNode { ptr: Unique::new(*ptr as *mut LeafNode<K, V>) }
BoxedNode { ptr: Unique::new(ptr.get() as *mut LeafNode<K, V>) }
}
fn as_ptr(&self) -> NonZero<*const LeafNode<K, V>> {
unsafe {
NonZero::new(*self.ptr as *const LeafNode<K, V>)
NonZero::new(self.ptr.as_ptr())
}
}
}
@ -241,7 +241,7 @@ impl<K, V> Root<K, V> {
pub fn pop_level(&mut self) {
debug_assert!(self.height > 0);
let top = *self.node.ptr as *mut u8;
let top = self.node.ptr.as_ptr() as *mut u8;
self.node = unsafe {
BoxedNode::from_ptr(self.as_mut()
@ -308,7 +308,7 @@ unsafe impl<K: Send, V: Send, Type> Send
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
fn as_internal(&self) -> &InternalNode<K, V> {
unsafe {
&*(*self.node as *const InternalNode<K, V>)
&*(self.node.get() as *const InternalNode<K, V>)
}
}
}
@ -316,7 +316,7 @@ impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
unsafe {
&mut *(*self.node as *mut InternalNode<K, V>)
&mut *(self.node.get() as *mut InternalNode<K, V>)
}
}
}
@ -358,7 +358,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
fn as_leaf(&self) -> &LeafNode<K, V> {
unsafe {
&**self.node
&*self.node.get()
}
}
@ -510,7 +510,7 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
unsafe {
&mut *(*self.node as *mut LeafNode<K, V>)
&mut *(self.node.get() as *mut LeafNode<K, V>)
}
}
@ -1253,13 +1253,13 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
}
heap::deallocate(
*right_node.node as *mut u8,
right_node.node.get() as *mut u8,
mem::size_of::<InternalNode<K, V>>(),
mem::align_of::<InternalNode<K, V>>()
);
} else {
heap::deallocate(
*right_node.node as *mut u8,
right_node.node.get() as *mut u8,
mem::size_of::<LeafNode<K, V>>(),
mem::align_of::<LeafNode<K, V>>()
);

View file

@ -161,7 +161,7 @@ impl<T> LinkedList<T> {
match self.head {
None => self.tail = node,
Some(head) => (*head.as_mut_ptr()).prev = node,
Some(mut head) => head.as_mut().prev = node,
}
self.head = node;
@ -173,12 +173,12 @@ impl<T> LinkedList<T> {
#[inline]
fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
self.head.map(|node| unsafe {
let node = Box::from_raw(node.as_mut_ptr());
let node = Box::from_raw(node.as_ptr());
self.head = node.next;
match self.head {
None => self.tail = None,
Some(head) => (*head.as_mut_ptr()).prev = None,
Some(mut head) => head.as_mut().prev = None,
}
self.len -= 1;
@ -196,7 +196,7 @@ impl<T> LinkedList<T> {
match self.tail {
None => self.head = node,
Some(tail) => (*tail.as_mut_ptr()).next = node,
Some(mut tail) => tail.as_mut().next = node,
}
self.tail = node;
@ -208,12 +208,12 @@ impl<T> LinkedList<T> {
#[inline]
fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
self.tail.map(|node| unsafe {
let node = Box::from_raw(node.as_mut_ptr());
let node = Box::from_raw(node.as_ptr());
self.tail = node.prev;
match self.tail {
None => self.head = None,
Some(tail) => (*tail.as_mut_ptr()).next = None,
Some(mut tail) => tail.as_mut().next = None,
}
self.len -= 1;
@ -285,11 +285,11 @@ impl<T> LinkedList<T> {
pub fn append(&mut self, other: &mut Self) {
match self.tail {
None => mem::swap(self, other),
Some(tail) => {
if let Some(other_head) = other.head.take() {
Some(mut tail) => {
if let Some(mut other_head) = other.head.take() {
unsafe {
(*tail.as_mut_ptr()).next = Some(other_head);
(*other_head.as_mut_ptr()).prev = Some(tail);
tail.as_mut().next = Some(other_head);
other_head.as_mut().prev = Some(tail);
}
self.tail = other.tail.take();
@ -477,7 +477,9 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front(&self) -> Option<&T> {
self.head.map(|node| unsafe { &(**node).element })
unsafe {
self.head.as_ref().map(|node| &node.as_ref().element)
}
}
/// Provides a mutable reference to the front element, or `None` if the list
@ -503,7 +505,9 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front_mut(&mut self) -> Option<&mut T> {
self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
unsafe {
self.head.as_mut().map(|node| &mut node.as_mut().element)
}
}
/// Provides a reference to the back element, or `None` if the list is
@ -523,7 +527,9 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back(&self) -> Option<&T> {
self.tail.map(|node| unsafe { &(**node).element })
unsafe {
self.tail.as_ref().map(|node| &node.as_ref().element)
}
}
/// Provides a mutable reference to the back element, or `None` if the list
@ -549,7 +555,9 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back_mut(&mut self) -> Option<&mut T> {
self.tail.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
unsafe {
self.tail.as_mut().map(|node| &mut node.as_mut().element)
}
}
/// Adds an element first in the list.
@ -694,9 +702,9 @@ impl<T> LinkedList<T> {
let second_part_head;
unsafe {
second_part_head = (*split_node.unwrap().as_mut_ptr()).next.take();
if let Some(head) = second_part_head {
(*head.as_mut_ptr()).prev = None;
second_part_head = split_node.unwrap().as_mut().next.take();
if let Some(mut head) = second_part_head {
head.as_mut().prev = None;
}
}
@ -788,7 +796,8 @@ impl<'a, T> Iterator for Iter<'a, T> {
None
} else {
self.head.map(|node| unsafe {
let node = &**node;
// Need an unbound lifetime to get 'a
let node = &*node.as_ptr();
self.len -= 1;
self.head = node.next;
&node.element
@ -810,7 +819,8 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
None
} else {
self.tail.map(|node| unsafe {
let node = &**node;
// Need an unbound lifetime to get 'a
let node = &*node.as_ptr();
self.len -= 1;
self.tail = node.prev;
&node.element
@ -835,7 +845,8 @@ impl<'a, T> Iterator for IterMut<'a, T> {
None
} else {
self.head.map(|node| unsafe {
let node = &mut *node.as_mut_ptr();
// Need an unbound lifetime to get 'a
let node = &mut *node.as_ptr();
self.len -= 1;
self.head = node.next;
&mut node.element
@ -857,7 +868,8 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
None
} else {
self.tail.map(|node| unsafe {
let node = &mut *node.as_mut_ptr();
// Need an unbound lifetime to get 'a
let node = &mut *node.as_ptr();
self.len -= 1;
self.tail = node.prev;
&mut node.element
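The comments added in these iterator hunks mention needing "an unbound lifetime to get `'a`". In isolation the trick looks like this (a hypothetical helper, not code from the diff): dereferencing a raw pointer yields a reference whose lifetime the compiler leaves unconstrained, so it can be returned at whatever lifetime the signature demands.

```rust
// Hypothetical helper illustrating the unbound-lifetime pattern: the
// lifetime 'a of the result is not tied to any borrow of `p` itself.
unsafe fn unbound<'a, T>(p: *mut T) -> &'a mut T {
    &mut *p
}

fn main() {
    let mut x = 5;
    let r = unsafe { unbound(&mut x) };
    *r += 1;
    assert_eq!(x, 6);
}
```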
@ -903,8 +915,8 @@ impl<'a, T> IterMut<'a, T> {
pub fn insert_next(&mut self, element: T) {
match self.head {
None => self.list.push_back(element),
Some(head) => unsafe {
let prev = match (**head).prev {
Some(mut head) => unsafe {
let mut prev = match head.as_ref().prev {
None => return self.list.push_front(element),
Some(prev) => prev,
};
@ -915,8 +927,8 @@ impl<'a, T> IterMut<'a, T> {
element: element,
})));
(*prev.as_mut_ptr()).next = node;
(*head.as_mut_ptr()).prev = node;
prev.as_mut().next = node;
head.as_mut().prev = node;
self.list.len += 1;
},
@ -948,7 +960,9 @@ impl<'a, T> IterMut<'a, T> {
if self.len == 0 {
None
} else {
self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
unsafe {
self.head.as_mut().map(|node| &mut node.as_mut().element)
}
}
}
}
@ -1276,21 +1290,21 @@ mod tests {
assert_eq!(0, list.len);
return;
}
Some(node) => node_ptr = &**node,
Some(node) => node_ptr = &*node.as_ptr(),
}
loop {
match (last_ptr, node_ptr.prev) {
(None, None) => {}
(None, _) => panic!("prev link for head"),
(Some(p), Some(pptr)) => {
assert_eq!(p as *const Node<T>, *pptr as *const Node<T>);
assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
}
_ => panic!("prev link is none, not good"),
}
match node_ptr.next {
Some(next) => {
last_ptr = Some(node_ptr);
node_ptr = &**next;
node_ptr = &*next.as_ptr();
len += 1;
}
None => {
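The recurring change throughout this file is `(*node.as_mut_ptr()).field` becoming `node.as_mut().field`, which also forces the matched binding to be `mut`, since `as_mut` takes `&mut self`. A self-contained sketch of the pattern using the stable `NonNull` (the public successor of `Shared`; the names are illustrative):

```rust
use std::ptr::NonNull;

struct Node<T> {
    element: T,
    prev: Option<NonNull<Node<T>>>,
}

fn main() {
    let mut a = Node { element: 1u32, prev: None };
    let mut b = Node { element: 2u32, prev: None };
    let pb = NonNull::from(&mut b);
    let head = Some(NonNull::from(&mut a));
    // Mirrors `Some(mut head) => head.as_mut().prev = node` above: the
    // binding must be `mut` because NonNull::as_mut takes &mut self.
    match head {
        None => {}
        Some(mut head) => unsafe { head.as_mut().prev = Some(pb) },
    }
    assert_eq!(a.prev, Some(pb));
    assert_eq!(unsafe { pb.as_ref() }.element, 2);
}
```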

View file

@ -67,7 +67,6 @@
#![stable(feature = "rust1", since = "1.0.0")]
use alloc::boxed::Box;
use alloc::heap::EMPTY;
use alloc::raw_vec::RawVec;
use borrow::ToOwned;
use borrow::Cow;
@ -1776,9 +1775,9 @@ impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
// A common case is passing a vector into a function which immediately
// re-collects into a vector. We can short circuit this if the IntoIter
// has not been advanced at all.
if *iterator.buf == iterator.ptr as *mut T {
if iterator.buf.as_ptr() as *const _ == iterator.ptr {
unsafe {
let vec = Vec::from_raw_parts(*iterator.buf as *mut T,
let vec = Vec::from_raw_parts(iterator.buf.as_ptr(),
iterator.len(),
iterator.cap);
mem::forget(iterator);
@ -2192,7 +2191,8 @@ impl<T> Iterator for IntoIter<T> {
self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
// Use a non-null pointer value
Some(ptr::read(EMPTY as *mut T))
// (self.ptr might be null because of wrapping)
Some(ptr::read(1 as *mut T))
} else {
let old = self.ptr;
self.ptr = self.ptr.offset(1);
@ -2231,7 +2231,8 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
self.end = arith_offset(self.end as *const i8, -1) as *mut T;
// Use a non-null pointer value
Some(ptr::read(EMPTY as *mut T))
// (self.end might be null because of wrapping)
Some(ptr::read(1 as *mut T))
} else {
self.end = self.end.offset(-1);
@ -2269,7 +2270,7 @@ unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
for _x in self.by_ref() {}
// RawVec handles deallocation
let _ = unsafe { RawVec::from_raw_parts(self.buf.as_mut_ptr(), self.cap) };
let _ = unsafe { RawVec::from_raw_parts(self.buf.as_ptr(), self.cap) };
}
}
@ -2334,7 +2335,7 @@ impl<'a, T> Drop for Drain<'a, T> {
if self.tail_len > 0 {
unsafe {
let source_vec = &mut *self.vec.as_mut_ptr();
let source_vec = self.vec.as_mut();
// memmove back untouched tail, update to new length
let start = source_vec.len();
let tail = self.tail_start;
@ -2456,8 +2457,7 @@ impl<'a, I: Iterator> Drop for Splice<'a, I> {
unsafe {
if self.drain.tail_len == 0 {
let vec = &mut *self.drain.vec.as_mut_ptr();
vec.extend(self.replace_with.by_ref());
self.drain.vec.as_mut().extend(self.replace_with.by_ref());
return
}
@ -2498,7 +2498,7 @@ impl<'a, T> Drain<'a, T> {
/// Fill that range as much as possible with new elements from the `replace_with` iterator.
/// Return whether we filled the entire range. (`replace_with.next()` didn't return `None`.)
unsafe fn fill<I: Iterator<Item=T>>(&mut self, replace_with: &mut I) -> bool {
let vec = &mut *self.vec.as_mut_ptr();
let vec = self.vec.as_mut();
let range_start = vec.len;
let range_end = self.tail_start;
let range_slice = slice::from_raw_parts_mut(
@ -2518,7 +2518,7 @@ impl<'a, T> Drain<'a, T> {
/// Make room for inserting more elements before the tail.
unsafe fn move_tail(&mut self, extra_capacity: usize) {
let vec = &mut *self.vec.as_mut_ptr();
let vec = self.vec.as_mut();
let used_capacity = self.tail_start + self.tail_len;
vec.buf.reserve(used_capacity, extra_capacity);

View file

@ -2160,7 +2160,7 @@ impl<'a, T: 'a> Drop for Drain<'a, T> {
fn drop(&mut self) {
for _ in self.by_ref() {}
let source_deque = unsafe { &mut *self.deque.as_mut_ptr() };
let source_deque = unsafe { self.deque.as_mut() };
// T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
//

View file

@ -132,7 +132,6 @@
//! use std::cell::Cell;
//! use std::ptr::Shared;
//! use std::intrinsics::abort;
//! use std::intrinsics::assume;
//!
//! struct Rc<T: ?Sized> {
//! ptr: Shared<RcBox<T>>
@ -171,8 +170,7 @@
//! impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
//! fn inner(&self) -> &RcBox<T> {
//! unsafe {
//! assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
//! &(**self.ptr)
//! self.ptr.as_ref()
//! }
//! }
//! }

View file

@ -13,7 +13,7 @@
reason = "needs an RFC to flesh out the design",
issue = "27730")]
use ops::{CoerceUnsized, Deref};
use ops::CoerceUnsized;
/// Unsafe trait to indicate what types are usable with the NonZero struct
pub unsafe trait Zeroable {}
@ -46,15 +46,10 @@ impl<T: Zeroable> NonZero<T> {
pub const unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
}
impl<T: Zeroable> Deref for NonZero<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
let NonZero(ref inner) = *self;
inner
/// Gets the inner value.
pub fn get(self) -> T {
self.0
}
}
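With the `Deref` impl removed, callers switch from `*val` to an explicit `val.get()`, as the updated test further down shows. The same accessor shape survives in today's stable `std::num` NonZero types:

```rust
use std::num::NonZeroUsize;

fn main() {
    let n = NonZeroUsize::new(42).expect("value is non-zero");
    assert_eq!(n.get(), 42); // explicit getter instead of `*n`
    // The non-zero invariant still buys the niche layout optimization:
    assert_eq!(
        std::mem::size_of::<Option<NonZeroUsize>>(),
        std::mem::size_of::<usize>()
    );
}
```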

View file

@ -17,7 +17,7 @@
#![stable(feature = "rust1", since = "1.0.0")]
use intrinsics;
use ops::{CoerceUnsized, Deref};
use ops::CoerceUnsized;
use fmt;
use hash;
use marker::{PhantomData, Unsize};
@ -957,13 +957,25 @@ impl<T: ?Sized> PartialOrd for *mut T {
}
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. This in turn implies that the
/// `Unique<T>` is `Send`/`Sync` if `T` is `Send`/`Sync`, unlike a raw
/// `*mut T` (which conveys no particular ownership semantics). It
/// also implies that the referent of the pointer should not be
/// modified without a unique path to the `Unique` reference. Useful
/// for building abstractions like `Vec<T>` or `Box<T>`, which
/// internally use raw pointers to manage the memory that they own.
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `Shared`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[allow(missing_debug_implementations)]
#[unstable(feature = "unique", reason = "needs an RFC to flesh out design",
issue = "27730")]
@ -991,6 +1003,20 @@ unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
#[unstable(feature = "unique", issue = "27730")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }
#[unstable(feature = "unique", issue = "27730")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
pub fn empty() -> Self {
unsafe {
let ptr = mem::align_of::<T>() as *mut T;
Unique::new(ptr)
}
}
}
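`Unique::empty()` here (and `Shared::empty()` below) is the ancestor of today's stable `NonNull::dangling()`: a non-null, well-aligned pointer that may never be dereferenced, yet satisfies the non-null invariant the new docs describe. A quick check with the stable API:

```rust
use std::mem;
use std::ptr::NonNull;

fn main() {
    // Dangling, but non-null and well-aligned, exactly like empty() above.
    let p: NonNull<u64> = NonNull::dangling();
    assert!(!p.as_ptr().is_null());
    assert_eq!(p.as_ptr() as usize % mem::align_of::<u64>(), 0);
    // The forbidden null value serves as the enum discriminant:
    assert_eq!(
        mem::size_of::<Option<NonNull<u64>>>(),
        mem::size_of::<NonNull<u64>>()
    );
}
```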
#[unstable(feature = "unique", issue = "27730")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
@ -1002,41 +1028,72 @@ impl<T: ?Sized> Unique<T> {
Unique { pointer: NonZero::new(ptr), _marker: PhantomData }
}
/// Acquires the underlying `*mut` pointer.
pub fn as_ptr(self) -> *mut T {
self.pointer.get() as *mut T
}
/// Dereferences the content.
pub unsafe fn get(&self) -> &T {
&**self.pointer
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
pub unsafe fn as_ref(&self) -> &T {
&*self.as_ptr()
}
/// Mutably dereferences the content.
pub unsafe fn get_mut(&mut self) -> &mut T {
&mut ***self
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
pub unsafe fn as_mut(&mut self) -> &mut T {
&mut *self.as_ptr()
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Clone for Unique<T> {
fn clone(&self) -> Self {
*self
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Copy for Unique<T> { }
#[unstable(feature = "unique", issue = "27730")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> { }
#[unstable(feature = "unique", issue= "27730")]
impl<T:?Sized> Deref for Unique<T> {
type Target = *mut T;
#[inline]
fn deref(&self) -> &*mut T {
unsafe { mem::transmute(&*self.pointer) }
}
}
#[unstable(feature = "unique", issue = "27730")]
impl<T> fmt::Pointer for Unique<T> {
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self.pointer, f)
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// A wrapper around a raw `*mut T` that indicates that the possessor
/// of this wrapper has shared ownership of the referent. Useful for
/// building abstractions like `Rc<T>` or `Arc<T>`, which internally
/// use raw pointers to manage the memory that they own.
/// building abstractions like `Rc<T>`, `Arc<T>`, or doubly-linked lists, which
/// internally use aliased raw pointers to manage the memory that they own.
///
/// This is similar to `Unique`, except that it doesn't make any aliasing
/// guarantees, and doesn't derive Send and Sync. Note that unlike `&T`,
/// Shared has no special mutability requirements. Shared may mutate data
/// aliased by other Shared pointers. More precise rules require Rust to
/// develop an actual aliasing model.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Shared<T>>` has the same size as `Shared<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Shared<T>` is covariant over `T`. If this is incorrect
/// for your use case, you should include some PhantomData in your type to
/// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
/// Usually this won't be necessary; covariance is correct for Rc, Arc, and LinkedList
/// because they provide a public API that follows the normal shared XOR mutable
/// rules of Rust.
#[allow(missing_debug_implementations)]
#[unstable(feature = "shared", reason = "needs an RFC to flesh out design",
issue = "27730")]
@ -1060,6 +1117,20 @@ impl<T: ?Sized> !Send for Shared<T> { }
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> !Sync for Shared<T> { }
#[unstable(feature = "shared", issue = "27730")]
impl<T: Sized> Shared<T> {
/// Creates a new `Shared` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
pub fn empty() -> Self {
unsafe {
let ptr = mem::align_of::<T>() as *mut T;
Shared::new(ptr)
}
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Shared<T> {
/// Creates a new `Shared`.
@ -1067,16 +1138,38 @@ impl<T: ?Sized> Shared<T> {
/// # Safety
///
/// `ptr` must be non-null.
pub unsafe fn new(ptr: *const T) -> Self {
pub unsafe fn new(ptr: *mut T) -> Self {
Shared { pointer: NonZero::new(ptr), _marker: PhantomData }
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Shared<T> {
/// Acquires the underlying `*mut` pointer.
pub fn as_ptr(self) -> *mut T {
self.pointer.get() as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
pub unsafe fn as_ref(&self) -> &T {
&*self.as_ptr()
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
pub unsafe fn as_mut(&mut self) -> &mut T {
&mut *self.as_ptr()
}
/// Acquires the underlying pointer as a `*mut` pointer.
#[rustc_deprecated(since = "1.19", reason = "renamed to `as_ptr` for ergonomics/consistency")]
#[unstable(feature = "shared", issue = "27730")]
pub unsafe fn as_mut_ptr(&self) -> *mut T {
**self as _
self.as_ptr()
}
}
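What the new `Shared` docs mean by "Shared may mutate data aliased by other Shared pointers", sketched with the stable `NonNull` (which, like `Shared`, is `Copy` and makes no aliasing guarantees on its own):

```rust
use std::ptr::NonNull;

fn main() {
    let a: NonNull<u32> = NonNull::from(Box::leak(Box::new(0u32)));
    let b = a; // Copy: both handles alias the same allocation
    unsafe {
        *a.as_ptr() += 1;                // mutate through one handle...
        assert_eq!(*b.as_ptr(), 1);      // ...observe it through the other
        drop(Box::from_raw(b.as_ptr())); // exactly one final owner frees
    }
}
```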
@ -1094,18 +1187,8 @@ impl<T: ?Sized> Copy for Shared<T> { }
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Shared<U>> for Shared<T> where T: Unsize<U> { }
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Deref for Shared<T> {
type Target = *const T;
#[inline]
fn deref(&self) -> &*const T {
unsafe { mem::transmute(&*self.pointer) }
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T> fmt::Pointer for Shared<T> {
impl<T: ?Sized> fmt::Pointer for Shared<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self.pointer, f)
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}

View file

@ -31,12 +31,12 @@ fn test_match_on_nonzero_option() {
NonZero::new(42)
});
match a {
Some(val) => assert_eq!(*val, 42),
Some(val) => assert_eq!(val.get(), 42),
None => panic!("unexpected None while matching on Some(NonZero(_))")
}
match unsafe { Some(NonZero::new(43)) } {
Some(val) => assert_eq!(*val, 43),
Some(val) => assert_eq!(val.get(), 43),
None => panic!("unexpected None while matching on Some(NonZero(_))")
}
}

View file

@ -166,10 +166,10 @@ fn test_set_memory() {
#[test]
fn test_unsized_unique() {
let xs: &mut [i32] = &mut [1, 2, 3];
let ptr = unsafe { Unique::new(xs as *mut [i32]) };
let ys = unsafe { &mut **ptr };
let zs: &mut [i32] = &mut [1, 2, 3];
let xs: &[i32] = &[1, 2, 3];
let ptr = unsafe { Unique::new(xs as *const [i32] as *mut [i32]) };
let ys = unsafe { ptr.as_ref() };
let zs: &[i32] = &[1, 2, 3];
assert!(ys == zs);
}

View file

@ -62,14 +62,14 @@ pub struct Bytes {
impl Deref for Bytes {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { slice::from_raw_parts(*self.ptr, self.len) }
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl Drop for Bytes {
fn drop(&mut self) {
unsafe {
libc::free(*self.ptr as *mut _);
libc::free(self.ptr.as_ptr() as *mut _);
}
}
}

@ -1 +1 @@
Subproject commit c34a802d1eb037b44c5252078c7270b5472e0f65
Subproject commit 03562b0cb26a00f49d4eaf18ca3e49608110b0c8

View file

@ -81,6 +81,7 @@ pub enum DepNode<D: Clone + Debug> {
TransCrateItem(D),
TransInlinedItem(D),
TransWriteMetadata,
CrateVariances,
// Nodes representing bits of computed IR in the tcx. Each shared
// table in the tcx (or elsewhere) maps to one of these
@ -89,6 +90,8 @@ pub enum DepNode<D: Clone + Debug> {
// predicates for an item wind up in `ItemSignature`).
AssociatedItems(D),
ItemSignature(D),
ItemVarianceConstraints(D),
ItemVariances(D),
IsForeignItem(D),
TypeParamPredicates((D, D)),
SizedConstraint(D),
@ -180,6 +183,7 @@ impl<D: Clone + Debug> DepNode<D> {
TransCrateItem,
AssociatedItems,
ItemSignature,
ItemVariances,
IsForeignItem,
AssociatedItemDefIds,
InherentImpls,
@ -201,6 +205,7 @@ impl<D: Clone + Debug> DepNode<D> {
MirKrate => Some(MirKrate),
TypeckBodiesKrate => Some(TypeckBodiesKrate),
Coherence => Some(Coherence),
CrateVariances => Some(CrateVariances),
Resolve => Some(Resolve),
Variance => Some(Variance),
PrivacyAccessLevels(k) => Some(PrivacyAccessLevels(k)),
@ -232,6 +237,8 @@ impl<D: Clone + Debug> DepNode<D> {
TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
AssociatedItems(ref d) => op(d).map(AssociatedItems),
ItemSignature(ref d) => op(d).map(ItemSignature),
ItemVariances(ref d) => op(d).map(ItemVariances),
ItemVarianceConstraints(ref d) => op(d).map(ItemVarianceConstraints),
IsForeignItem(ref d) => op(d).map(IsForeignItem),
TypeParamPredicates((ref item, ref param)) => {
Some(TypeParamPredicates((try_opt!(op(item)), try_opt!(op(param)))))

View file

@ -18,7 +18,6 @@ mod raii;
mod safe;
mod shadow;
mod thread;
mod visit;
pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
pub use self::dep_node::DepNode;
@ -28,5 +27,4 @@ pub use self::graph::WorkProduct;
pub use self::query::DepGraphQuery;
pub use self::safe::AssertDepGraphSafe;
pub use self::safe::DepGraphSafe;
pub use self::visit::visit_all_item_likes_in_krate;
pub use self::raii::DepTask;

View file

@ -1,77 +0,0 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hir;
use hir::def_id::DefId;
use hir::itemlikevisit::ItemLikeVisitor;
use ty::TyCtxt;
use super::dep_node::DepNode;
/// Visit all the items in the krate in some order. When visiting a
/// particular item, first create a dep-node by calling `dep_node_fn`
/// and push that onto the dep-graph stack of tasks, and also create a
/// read edge from the corresponding AST node. This is used in
/// compiler passes to automatically record the item that they are
/// working on.
pub fn visit_all_item_likes_in_krate<'a, 'tcx, V, F>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mut dep_node_fn: F,
visitor: &mut V)
where F: FnMut(DefId) -> DepNode<DefId>, V: ItemLikeVisitor<'tcx>
{
struct TrackingVisitor<'visit, 'tcx: 'visit, F: 'visit, V: 'visit> {
tcx: TyCtxt<'visit, 'tcx, 'tcx>,
dep_node_fn: &'visit mut F,
visitor: &'visit mut V,
}
impl<'visit, 'tcx, F, V> ItemLikeVisitor<'tcx> for TrackingVisitor<'visit, 'tcx, F, V>
where F: FnMut(DefId) -> DepNode<DefId>, V: ItemLikeVisitor<'tcx>
{
fn visit_item(&mut self, i: &'tcx hir::Item) {
let item_def_id = self.tcx.hir.local_def_id(i.id);
let task_id = (self.dep_node_fn)(item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id.clone());
debug!("Started task {:?}", task_id);
self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
self.visitor.visit_item(i);
debug!("Ended task {:?}", task_id);
}
fn visit_trait_item(&mut self, i: &'tcx hir::TraitItem) {
let trait_item_def_id = self.tcx.hir.local_def_id(i.id);
let task_id = (self.dep_node_fn)(trait_item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id.clone());
debug!("Started task {:?}", task_id);
self.tcx.dep_graph.read(DepNode::Hir(trait_item_def_id));
self.visitor.visit_trait_item(i);
debug!("Ended task {:?}", task_id);
}
fn visit_impl_item(&mut self, i: &'tcx hir::ImplItem) {
let impl_item_def_id = self.tcx.hir.local_def_id(i.id);
let task_id = (self.dep_node_fn)(impl_item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id.clone());
debug!("Started task {:?}", task_id);
self.tcx.dep_graph.read(DepNode::Hir(impl_item_def_id));
self.visitor.visit_impl_item(i);
debug!("Ended task {:?}", task_id);
}
}
let krate = tcx.dep_graph.with_ignore(|| tcx.hir.krate());
let mut tracking_visitor = TrackingVisitor {
tcx: tcx,
dep_node_fn: &mut dep_node_fn,
visitor: visitor,
};
krate.visit_all_item_likes(&mut tracking_visitor)
}

View file

@ -88,7 +88,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> {
/// that are inside of an item-like.
///
/// **This is the most common choice.** A very common pattern is
/// to use `tcx.visit_all_item_likes_in_krate()` as an outer loop,
/// to use `visit_all_item_likes()` as an outer loop,
/// and to have the visitor that visits the contents of each item
/// using this setting.
OnlyBodies(&'this Map<'tcx>),

View file

@ -19,9 +19,8 @@ use super::intravisit::Visitor;
///
/// 1. **Shallow visit**: Get a simple callback for every item (or item-like thing) in the HIR.
/// - Example: find all items with a `#[foo]` attribute on them.
/// - How: Implement `ItemLikeVisitor` and call `tcx.visit_all_item_likes_in_krate()`.
/// - How: Implement `ItemLikeVisitor` and call `tcx.hir.krate().visit_all_item_likes()`.
/// - Pro: Efficient; just walks the lists of item-like things, not the nodes themselves.
/// - Pro: Integrates well into dependency tracking.
/// - Con: Don't get information about nesting
/// - Con: Don't have methods for specific bits of HIR, like "on
/// every expr, do this".
@ -30,7 +29,7 @@ use super::intravisit::Visitor;
/// within one another.
/// - Example: Examine each expression to look for its type and do some check or other.
/// - How: Implement `intravisit::Visitor` and use
/// `tcx.visit_all_item_likes_in_krate(visitor.as_deep_visitor())`. Within
/// `tcx.hir.krate().visit_all_item_likes(visitor.as_deep_visitor())`. Within
/// your `intravisit::Visitor` impl, implement methods like
/// `visit_expr()`; don't forget to invoke
/// `intravisit::walk_visit_expr()` to keep walking the subparts.

View file

@ -470,9 +470,6 @@ pub struct GlobalCtxt<'tcx> {
pub lang_items: middle::lang_items::LanguageItems,
/// True if the variance has been computed yet; false otherwise.
pub variance_computed: Cell<bool>,
/// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
/// present in this set can be warned about.
pub used_unsafe: RefCell<NodeSet>,
@ -744,7 +741,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
dep_graph: dep_graph.clone(),
types: common_types,
named_region_map: named_region_map,
variance_computed: Cell::new(false),
trait_map: resolutions.trait_map,
export_map: resolutions.export_map,
fulfilled_predicates: RefCell::new(fulfilled_predicates),

View file

@ -266,6 +266,12 @@ impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx
}
}
impl<'tcx> QueryDescription for queries::crate_variances<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("computing the variances for items in this crate")
}
}
impl<'tcx> QueryDescription for queries::mir_shims<'tcx> {
fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
format!("generating MIR shim for `{}`",
@ -549,18 +555,6 @@ macro_rules! define_map_struct {
}
};
// Detect things with the `pub` modifier
(tcx: $tcx:tt,
input: (([pub $($other_modifiers:tt)*] $attrs:tt $name:tt) $($input:tt)*),
output: $output:tt) => {
define_map_struct! {
tcx: $tcx,
ready: ([pub] $attrs $name),
input: ($($input)*),
output: $output
}
};
// No modifiers left? This is a private item.
(tcx: $tcx:tt,
input: (([] $attrs:tt $name:tt) $($input:tt)*),
@ -687,9 +681,13 @@ define_maps! { <'tcx>
/// True if this is a foreign item (i.e., linked via `extern { ... }`).
[] is_foreign_item: IsForeignItem(DefId) -> bool,
/// Get a map with the variance of every item; use `variances_of`
/// instead.
[] crate_variances: crate_variances(CrateNum) -> Rc<ty::CrateVariancesMap>,
/// Maps from def-id of a type or region parameter to its
/// (inferred) variance.
[pub] variances_of: ItemSignature(DefId) -> Rc<Vec<ty::Variance>>,
[] variances_of: ItemVariances(DefId) -> Rc<Vec<ty::Variance>>,
/// Maps from an impl/trait def-id to a list of the def-ids of its items
[] associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
@ -825,3 +823,7 @@ fn const_eval_dep_node((def_id, _): (DefId, &Substs)) -> DepNode<DefId> {
fn mir_keys(_: CrateNum) -> DepNode<DefId> {
DepNode::MirKeys
}
fn crate_variances(_: CrateNum) -> DepNode<DefId> {
DepNode::CrateVariances
}

View file

@ -15,7 +15,7 @@ pub use self::IntVarValue::*;
pub use self::LvaluePreference::*;
pub use self::fold::TypeFoldable;
use dep_graph::{self, DepNode};
use dep_graph::DepNode;
use hir::{map as hir_map, FreevarMap, TraitMap};
use hir::def::{Def, CtorKind, ExportMap};
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
@ -55,9 +55,9 @@ use rustc_const_math::ConstInt;
use rustc_data_structures::accumulate_vec::IntoIter as AccIntoIter;
use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
HashStable};
use rustc_data_structures::transitive_relation::TransitiveRelation;
use hir;
use hir::itemlikevisit::ItemLikeVisitor;
pub use self::sty::{Binder, DebruijnIndex};
pub use self::sty::{FnSig, PolyFnSig};
@ -309,6 +309,27 @@ pub enum Variance {
Bivariant, // T<A> <: T<B> -- e.g., unused type parameter
}
/// The crate variances map is computed during typeck and contains the
/// variance of every item in the local crate. You should not use it
/// directly, because to do so will make your pass dependent on the
/// HIR of every item in the local crate. Instead, use
/// `tcx.variances_of()` to get the variance for a *particular*
/// item.
pub struct CrateVariancesMap {
/// This relation tracks the dependencies between the variance of
/// various items. In particular, if `a < b`, then the variance of
/// `a` depends on the sources of `b`.
pub dependencies: TransitiveRelation<DefId>,
/// For each item with generics, maps to a vector of the variance
/// of its generics. If an item has no generics, it will have no
/// entry.
pub variances: FxHashMap<DefId, Rc<Vec<ty::Variance>>>,
/// An empty vector, useful for cloning.
pub empty_variance: Rc<Vec<ty::Variance>>,
}
#[derive(Clone, Copy, Debug, RustcDecodable, RustcEncodable)]
pub struct MethodCallee<'tcx> {
/// Impl method ID, for inherent methods, or trait method ID, otherwise.
@ -2543,14 +2564,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
self.mk_region(ty::ReScope(self.node_extent(id)))
}
pub fn visit_all_item_likes_in_krate<V,F>(self,
dep_node_fn: F,
visitor: &mut V)
where F: FnMut(DefId) -> DepNode<DefId>, V: ItemLikeVisitor<'gcx>
{
dep_graph::visit_all_item_likes_in_krate(self.global_tcx(), dep_node_fn, visitor);
}
/// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err`
/// with the name of the crate containing the impl.
pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {

View file

@ -124,14 +124,8 @@ fn relate_item_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,
a_subst,
b_subst);
let variances;
let opt_variances = if relation.tcx().variance_computed.get() {
variances = relation.tcx().variances_of(item_def_id);
Some(&*variances)
} else {
None
};
relate_substs(relation, opt_variances, a_subst, b_subst)
let opt_variances = relation.tcx().variances_of(item_def_id);
relate_substs(relation, Some(&opt_variances), a_subst, b_subst)
}
pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,

View file

@ -72,7 +72,7 @@ impl<'tcx> From<ty::Region<'tcx>> for Kind<'tcx> {
impl<'tcx> Kind<'tcx> {
#[inline]
unsafe fn downcast<T>(self, tag: usize) -> Option<&'tcx T> {
let ptr = *self.ptr;
let ptr = self.ptr.get();
if ptr & TAG_MASK == tag {
Some(&*((ptr & !TAG_MASK) as *const _))
} else {
@ -102,7 +102,7 @@ impl<'tcx> fmt::Debug for Kind<'tcx> {
} else if let Some(r) = self.as_region() {
write!(f, "{:?}", r)
} else {
write!(f, "<unknwon @ {:p}>", *self.ptr as *const ())
write!(f, "<unknwon @ {:p}>", self.ptr.get() as *const ())
}
}
}

View file

@ -43,7 +43,7 @@ mod indexes {
unsafe { $Index(NonZero::new(idx + 1)) }
}
fn index(self) -> usize {
*self.0 - 1
self.0.get() - 1
}
}
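The `NonZero::new(idx + 1)` / `get() - 1` pair above is the bias trick: storing the index shifted by one keeps the all-zeros bit pattern free as a niche, so `Option<NodeIndex>` costs no extra space. Reconstructed with stable `NonZeroUsize` (names illustrative):

```rust
use std::mem;
use std::num::NonZeroUsize;

#[derive(Copy, Clone)]
struct NodeIndex(NonZeroUsize);

impl NodeIndex {
    fn new(idx: usize) -> NodeIndex {
        // Store idx + 1 so that zero never occurs.
        NodeIndex(NonZeroUsize::new(idx + 1).expect("index overflow"))
    }
    fn get(self) -> usize {
        self.0.get() - 1
    }
}

fn main() {
    assert_eq!(NodeIndex::new(0).get(), 0);
    // The niche means Option<NodeIndex> needs no separate tag:
    assert_eq!(mem::size_of::<Option<NodeIndex>>(), mem::size_of::<usize>());
}
```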

View file

@ -255,7 +255,7 @@ impl<'a, A: Array> Drop for Drain<'a, A> {
if self.tail_len > 0 {
unsafe {
let source_array_vec = &mut *self.array_vec.as_mut_ptr();
let source_array_vec = self.array_vec.as_mut();
// memmove back untouched tail, update to new length
let start = source_array_vec.len();
let tail = self.tail_start;

View file

@ -23,6 +23,6 @@ impl NodeIndex {
}
pub fn get(self) -> usize {
(*self.index - 1) as usize
(self.index.get() - 1) as usize
}
}

View file

@ -9,21 +9,23 @@
// except according to those terms.
use bitvec::BitMatrix;
use stable_hasher::{HashStable, StableHasher, StableHasherResult};
use fx::FxHashMap;
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
use stable_hasher::{HashStable, StableHasher, StableHasherResult};
use std::cell::RefCell;
use std::fmt::Debug;
use std::hash::Hash;
use std::mem;
#[derive(Clone)]
pub struct TransitiveRelation<T: Debug + PartialEq> {
// List of elements. This is used to map from a T to a usize. We
// expect domain to be small so just use a linear list versus a
// hashmap or something.
pub struct TransitiveRelation<T: Clone + Debug + Eq + Hash> {
// List of elements. This is used to map from a T to a usize.
elements: Vec<T>,
// Maps each element to an index.
map: FxHashMap<T, Index>,
// List of base edges in the graph. Require to compute transitive
// closure.
edges: Vec<Edge>,
@ -40,19 +42,20 @@ pub struct TransitiveRelation<T: Debug + PartialEq> {
closure: RefCell<Option<BitMatrix>>,
}
#[derive(Clone, PartialEq, PartialOrd, RustcEncodable, RustcDecodable)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
struct Index(usize);
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)]
struct Edge {
source: Index,
target: Index,
}
impl<T: Debug + PartialEq> TransitiveRelation<T> {
impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
pub fn new() -> TransitiveRelation<T> {
TransitiveRelation {
elements: vec![],
map: FxHashMap(),
edges: vec![],
closure: RefCell::new(None),
}
@ -63,21 +66,27 @@ impl<T: Debug + PartialEq> TransitiveRelation<T> {
}
fn index(&self, a: &T) -> Option<Index> {
self.elements.iter().position(|e| *e == *a).map(Index)
self.map.get(a).cloned()
}
fn add_index(&mut self, a: T) -> Index {
match self.index(&a) {
Some(i) => i,
None => {
self.elements.push(a);
let &mut TransitiveRelation {
ref mut elements,
ref closure,
ref mut map,
..
} = self;
// if we changed the dimensions, clear the cache
*self.closure.borrow_mut() = None;
map.entry(a.clone())
.or_insert_with(|| {
elements.push(a);
Index(self.elements.len() - 1)
}
}
// if we changed the dimensions, clear the cache
*closure.borrow_mut() = None;
Index(elements.len() - 1)
})
.clone()
}
/// Applies the (partial) function to each edge and returns a new
@ -85,7 +94,7 @@ impl<T: Debug + PartialEq> TransitiveRelation<T> {
/// `None`.
pub fn maybe_map<F, U>(&self, mut f: F) -> Option<TransitiveRelation<U>>
where F: FnMut(&T) -> Option<U>,
U: Debug + PartialEq,
U: Clone + Debug + Eq + Hash,
{
let mut result = TransitiveRelation::new();
for edge in &self.edges {
@ -125,6 +134,20 @@ impl<T: Debug + PartialEq> TransitiveRelation<T> {
}
}
/// Returns a vector of all things less than `a`.
///
/// Really this probably ought to be `impl Iterator<Item=&T>`, but
/// I'm too lazy to make that work, and -- given the caching
/// strategy -- it'd be a touch tricky anyhow.
pub fn less_than(&self, a: &T) -> Vec<&T> {
match self.index(a) {
Some(a) => self.with_closure(|closure| {
closure.iter(a.0).map(|i| &self.elements[i]).collect()
}),
None => vec![],
}
}
/// Picks what I am referring to as the "postdominating"
/// upper-bound for `a` and `b`. This is usually the least upper
/// bound, but in cases where there is no single least upper
@ -335,7 +358,7 @@ fn pare_down(candidates: &mut Vec<usize>, closure: &BitMatrix) {
}
impl<T> Encodable for TransitiveRelation<T>
where T: Encodable + Debug + PartialEq
where T: Clone + Encodable + Debug + Eq + Hash
{
fn encode<E: Encoder>(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_struct("TransitiveRelation", 2, |s| {
@ -347,19 +370,23 @@ impl<T> Encodable for TransitiveRelation<T>
}
impl<T> Decodable for TransitiveRelation<T>
where T: Decodable + Debug + PartialEq
where T: Clone + Decodable + Debug + Eq + Hash
{
fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
d.read_struct("TransitiveRelation", 2, |d| {
let elements = d.read_struct_field("elements", 0, |d| Decodable::decode(d))?;
let elements: Vec<T> = d.read_struct_field("elements", 0, |d| Decodable::decode(d))?;
let edges = d.read_struct_field("edges", 1, |d| Decodable::decode(d))?;
Ok(TransitiveRelation { elements, edges, closure: RefCell::new(None) })
let map = elements.iter()
.enumerate()
.map(|(index, elem)| (elem.clone(), Index(index)))
.collect();
Ok(TransitiveRelation { elements, edges, map, closure: RefCell::new(None) })
})
}
}
impl<CTX, T> HashStable<CTX> for TransitiveRelation<T>
where T: HashStable<CTX> + PartialEq + Debug
where T: HashStable<CTX> + Eq + Debug + Clone + Hash
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut CTX,
@ -369,6 +396,8 @@ impl<CTX, T> HashStable<CTX> for TransitiveRelation<T>
let TransitiveRelation {
ref elements,
ref edges,
// "map" is just a copy of elements vec
map: _,
// "closure" is just a copy of the data above
closure: _
} = *self;
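The net effect of this file's changes: the `elements` vector (index to element) is now paired with a hash map (element to index), turning `add_index` into O(1) interning instead of a linear `position` scan. The core pattern, reduced to a standalone sketch with `std::collections::HashMap` standing in for `FxHashMap`:

```rust
use std::collections::HashMap;
use std::hash::Hash;

struct Interner<T: Clone + Eq + Hash> {
    elements: Vec<T>,       // index -> element
    map: HashMap<T, usize>, // element -> index
}

impl<T: Clone + Eq + Hash> Interner<T> {
    fn new() -> Self {
        Interner { elements: Vec::new(), map: HashMap::new() }
    }

    fn add_index(&mut self, a: T) -> usize {
        let elements = &mut self.elements;
        // Insert the element only if it is not interned yet.
        *self.map.entry(a.clone()).or_insert_with(|| {
            elements.push(a);
            elements.len() - 1
        })
    }
}

fn main() {
    let mut rel = Interner::new();
    assert_eq!(rel.add_index("a"), 0);
    assert_eq!(rel.add_index("b"), 1);
    assert_eq!(rel.add_index("a"), 0); // existing element is reused
}
```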

View file

@ -51,7 +51,7 @@ use rustc::ty::TyCtxt;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc::ich::{ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED};
use graphviz::IntoCow;
use std::env;
@ -80,7 +80,7 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
if_this_changed: vec![],
then_this_would_need: vec![] };
visitor.process_attrs(ast::CRATE_NODE_ID, &tcx.hir.krate().attrs);
tcx.hir.krate().visit_all_item_likes(&mut visitor);
tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor());
(visitor.if_this_changed, visitor.then_this_would_need)
};
@ -166,17 +166,29 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> {
}
}
impl<'a, 'tcx> ItemLikeVisitor<'tcx> for IfThisChanged<'a, 'tcx> {
impl<'a, 'tcx> Visitor<'tcx> for IfThisChanged<'a, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir)
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
self.process_attrs(item.id, &item.attrs);
intravisit::walk_item(self, item);
}
fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
self.process_attrs(trait_item.id, &trait_item.attrs);
intravisit::walk_trait_item(self, trait_item);
}
fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
self.process_attrs(impl_item.id, &impl_item.attrs);
intravisit::walk_impl_item(self, impl_item);
}
fn visit_struct_field(&mut self, s: &'tcx hir::StructField) {
self.process_attrs(s.id, &s.attrs);
intravisit::walk_struct_field(self, s);
}
}

View file

@ -240,8 +240,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
impl<'a, 'b: 'a, 'tcx: 'b> EntryBuilder<'a, 'b, 'tcx> {
fn encode_item_variances(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
debug!("EntryBuilder::encode_item_variances({:?})", def_id);
fn encode_variances_of(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
debug!("EntryBuilder::encode_variances_of({:?})", def_id);
let tcx = self.tcx;
self.lazy_seq_from_slice(&tcx.variances_of(def_id))
}
@ -824,7 +824,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> EntryBuilder<'a, 'b, 'tcx> {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) |
hir::ItemTrait(..) => self.encode_item_variances(def_id),
hir::ItemTrait(..) => self.encode_variances_of(def_id),
_ => LazySeq::empty(),
},
generics: match item.node {

View file

@ -293,6 +293,7 @@ pub fn provide(providers: &mut Providers) {
collect::provide(providers);
coherence::provide(providers);
check::provide(providers);
variance::provide(providers);
}
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
@ -307,9 +308,6 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
})?;
time(time_passes, "variance inference", ||
variance::infer_variance(tcx));
tcx.sess.track_errors(|| {
time(time_passes, "impl wf inference", ||
impl_wf_check::impl_wf_check(tcx));
@ -320,6 +318,11 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
coherence::check_coherence(tcx));
})?;
tcx.sess.track_errors(|| {
time(time_passes, "variance testing", ||
variance::test::test_variance(tcx));
})?;
time(time_passes, "wf checking", || check::check_wf_new(tcx))?;
time(time_passes, "item-types checking", || check::check_item_types(tcx))?;

View file

@ -97,51 +97,29 @@ types involved before considering variance.
#### Dependency graph management
Because variance works in two phases, if we are not careful, we wind
up with a muddled mess of a dep-graph. Basically, when gathering up
the constraints, things are fairly well-structured, but then we do a
fixed-point iteration and write the results back where they
belong. You can't give this fixed-point iteration a single task
because it reads from (and writes to) the variance of all types in the
crate. In principle, we *could* switch the "current task" in a very
fine-grained way while propagating constraints in the fixed-point
iteration and everything would be automatically tracked, but that
would add some overhead and isn't really necessary anyway.
Because variance is a whole-crate inference, its dependency graph
can become quite muddled if we are not careful. To resolve this, we refactor
into two queries:
Instead what we do is to add edges into the dependency graph as we
construct the constraint set: so, if computing the constraints for
node `X` requires loading the inference variables from node `Y`, then
we can add an edge `Y -> X`, since the variance we ultimately infer
for `Y` will affect the variance we ultimately infer for `X`.
- `crate_variances` computes the variance for all items in the current crate.
- `variances_of` accesses the variance for an individual item; it
works by requesting `crate_variances` and extracting the relevant data.
If you limit yourself to reading `variances_of`, your code will then
depend only on the variances inferred for that particular item.
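To make the shape of this split concrete, here is a minimal, self-contained sketch in plain Rust (the `Variance` enum, the `DefId` alias, and the map layout are simplified stand-ins for rustc's internals, not the real query machinery):

use std::collections::HashMap;
use std::rc::Rc;

type DefId = u32;

#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum Variance { Covariant, Contravariant, Invariant, Bivariant }

struct CrateVariancesMap {
    variances: HashMap<DefId, Rc<Vec<Variance>>>,
    empty_variance: Rc<Vec<Variance>>,
}

// `crate_variances`: one whole-crate pass that gathers constraints, runs
// the fixed-point iteration, and records a solution vector per item.
fn crate_variances() -> Rc<CrateVariancesMap> {
    let mut variances = HashMap::new();
    variances.insert(1, Rc::new(vec![Variance::Covariant])); // e.g. `struct Foo<T>(T)`
    Rc::new(CrateVariancesMap { variances, empty_variance: Rc::new(Vec::new()) })
}

// `variances_of`: per-item accessor that merely projects one entry out of
// the crate-wide map.
fn variances_of(map: &CrateVariancesMap, item: DefId) -> Rc<Vec<Variance>> {
    map.variances.get(&item).unwrap_or(&map.empty_variance).clone()
}

fn main() {
    let map = crate_variances();
    println!("{:?}", variances_of(&map, 1)); // prints [Covariant]
}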
At this point, we've basically mirrored the inference graph in the
dependency graph. This means we can just completely ignore the
fixed-point iteration, since it is just shuffling values along this
graph. In other words, if we added the fine-grained switching of tasks
I described earlier, all it would show is that we repeatedly read the
values described by the constraints, but those edges were already
added when building the constraints in the first place.
Here is how this is implemented (at least as of the time of this
writing). The associated `DepNode` for the variance map is (at least
presently) `Signature(DefId)`. This means that, in `constraints.rs`,
when we visit an item to load up its constraints, we set
`Signature(DefId)` as the current task (the "memoization" pattern
described in the `dep-graph` README). Then whenever we find an
embedded type or trait, we add a synthetic read of `Signature(DefId)`,
which covers the variances we will compute for all of its
parameters. This read is synthetic (i.e., we call
`variance_map.read()`) because, in fact, the final variance is not yet
computed -- the read *will* occur (repeatedly) during the fixed-point
iteration phase.
In fact, we don't really *need* this synthetic read. That's because we
do wind up looking up the `TypeScheme` or `TraitDef` for all
references types/traits, and those reads add an edge from
`Signature(DefId)` (that is, they share the same dep node as
variance). However, I've kept the synthetic reads in place anyway,
just for future-proofing (in case we change the dep-nodes in the
future), and because it makes the intention a bit clearer I think.
Eventually, the goal is to rely on the red-green dependency management
algorithm. At the moment, however, we rely instead on a hack, where
`variances_of` ignores the dependencies of accessing
`crate_variances` and instead computes the *correct* dependencies
itself. To this end, when we build up the constraints in the system,
we also build up a transitive `dependencies` relation as part of the
crate map. A `(X, Y)` pair is added to the map each time we have a
constraint that the variance of some inferred for the item `X` depends
on the variance of some element of `Y`. This is to some extent a
mirroring of the inference graph in the dependency graph. This means
we can just completely ignore the fixed-point iteration, since it is
just shuffling values along this graph.
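As a similarly hedged sketch of the replay just described (the toy `TransitiveRelation` below stands in for the real one, which also computes the transitive closure; that part is elided here):

use std::collections::{BTreeMap, BTreeSet};

type DefId = u32;

#[derive(Default)]
struct TransitiveRelation {
    edges: BTreeMap<DefId, BTreeSet<DefId>>,
}

impl TransitiveRelation {
    // Record "the variance inferred for `a` depends on `b`".
    fn add(&mut self, a: DefId, b: DefId) {
        self.edges.entry(a).or_default().insert(b);
    }

    fn less_than(&self, a: DefId) -> Vec<DefId> {
        self.edges.get(&a).map(|s| s.iter().copied().collect()).unwrap_or_default()
    }
}

fn main() {
    let mut deps = TransitiveRelation::default();
    deps.add(1, 2); // `struct X<T> { y: Y<T> }`: X's variance depends on Y's
    // `variances_of(X)` computes `crate_variances` under an ignore and then
    // replays the fine-grained reads itself:
    for dep in deps.less_than(1) {
        println!("dep_graph.read(ItemVarianceConstraints({}))", dep);
    }
}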
### Addendum: Variance on traits


@ -15,6 +15,7 @@
use hir::def_id::DefId;
use middle::resolve_lifetime as rl;
use rustc::dep_graph::{AssertDepGraphSafe, DepNode};
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::hir::map as hir_map;
@ -22,12 +23,12 @@ use syntax::ast;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc_data_structures::transitive_relation::TransitiveRelation;
use super::terms::*;
use super::terms::VarianceTerm::*;
use super::xform::*;
use dep_graph::DepNode::ItemSignature as VarianceDepNode;
pub struct ConstraintContext<'a, 'tcx: 'a> {
pub terms_cx: TermsContext<'a, 'tcx>,
@ -38,6 +39,11 @@ pub struct ConstraintContext<'a, 'tcx: 'a> {
bivariant: VarianceTermPtr<'a>,
pub constraints: Vec<Constraint<'a>>,
/// This relation tracks the dependencies between the variance of
/// various items. In particular, if `a < b`, then the variance of
/// `a` depends on the sources of `b`.
pub dependencies: TransitiveRelation<DefId>,
}
/// Declares that the variable `decl_id` appears in a location with
@ -48,6 +54,20 @@ pub struct Constraint<'a> {
pub variance: &'a VarianceTerm<'a>,
}
/// To build constraints, we visit one item (type, trait) at a time
/// and look at its contents. So e.g. if we have
///
/// struct Foo<T> {
/// b: Bar<T>
/// }
///
/// then while we are visiting `Bar<T>`, the `CurrentItem` would have
/// the def-id and generics of `Foo`.
pub struct CurrentItem<'a> {
def_id: DefId,
generics: &'a ty::Generics,
}
pub fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>)
-> ConstraintContext<'a, 'tcx> {
let tcx = terms_cx.tcx;
@ -62,10 +82,10 @@ pub fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>)
invariant: invariant,
bivariant: bivariant,
constraints: Vec::new(),
dependencies: TransitiveRelation::new(),
};
// See README.md for a discussion on dep-graph management.
tcx.visit_all_item_likes_in_krate(VarianceDepNode, &mut constraint_cx);
tcx.hir.krate().visit_all_item_likes(&mut constraint_cx);
constraint_cx
}
@ -73,50 +93,32 @@ pub fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>)
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
let tcx = self.terms_cx.tcx;
let did = tcx.hir.local_def_id(item.id);
debug!("visit_item item={}", tcx.hir.node_to_string(item.id));
let def_id = tcx.hir.local_def_id(item.id);
// Encapsulate constructing the constraints into a task we can
// reference later. This can go away once the red-green
// algorithm is in place.
//
// See README.md for a detailed discussion
// on dep-graph management.
match item.node {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
let generics = tcx.generics_of(did);
// Not entirely obvious: constraints on structs/enums do not
// affect the variance of their type parameters. See discussion
// in comment at top of module.
//
// self.add_constraints_from_generics(generics);
for field in tcx.adt_def(did).all_fields() {
self.add_constraints_from_ty(generics,
tcx.type_of(field.did),
self.covariant);
}
tcx.dep_graph.with_task(DepNode::ItemVarianceConstraints(def_id),
AssertDepGraphSafe(self),
def_id,
visit_item_task);
}
hir::ItemTrait(..) => {
let generics = tcx.generics_of(did);
let trait_ref = ty::TraitRef {
def_id: did,
substs: Substs::identity_for_item(tcx, did)
};
self.add_constraints_from_trait_ref(generics,
trait_ref,
self.invariant);
_ => {
// Nothing to do here, skip the task.
}
}
hir::ItemExternCrate(_) |
hir::ItemUse(..) |
hir::ItemStatic(..) |
hir::ItemConst(..) |
hir::ItemFn(..) |
hir::ItemMod(..) |
hir::ItemForeignMod(..) |
hir::ItemGlobalAsm(..) |
hir::ItemTy(..) |
hir::ItemImpl(..) |
hir::ItemDefaultImpl(..) => {}
fn visit_item_task<'a, 'tcx>(ccx: AssertDepGraphSafe<&mut ConstraintContext<'a, 'tcx>>,
def_id: DefId)
{
ccx.0.build_constraints_for_item(def_id);
}
}
@ -140,16 +142,64 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
self.terms_cx.tcx
}
fn inferred_index(&self, param_id: ast::NodeId) -> InferredIndex {
match self.terms_cx.inferred_map.get(&param_id) {
Some(&index) => index,
None => {
bug!("no inferred index entry for {}",
self.tcx().hir.node_to_string(param_id));
fn build_constraints_for_item(&mut self, def_id: DefId) {
let tcx = self.tcx();
let id = self.tcx().hir.as_local_node_id(def_id).unwrap();
let item = tcx.hir.expect_item(id);
debug!("visit_item item={}", tcx.hir.node_to_string(item.id));
match item.node {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
let generics = tcx.generics_of(def_id);
let current_item = &CurrentItem { def_id, generics };
// Not entirely obvious: constraints on structs/enums do not
// affect the variance of their type parameters. See discussion
// in comment at top of module.
//
// self.add_constraints_from_generics(generics);
for field in tcx.adt_def(def_id).all_fields() {
self.add_constraints_from_ty(current_item,
tcx.type_of(field.did),
self.covariant);
}
}
hir::ItemTrait(..) |
hir::ItemExternCrate(_) |
hir::ItemUse(..) |
hir::ItemStatic(..) |
hir::ItemConst(..) |
hir::ItemFn(..) |
hir::ItemMod(..) |
hir::ItemForeignMod(..) |
hir::ItemGlobalAsm(..) |
hir::ItemTy(..) |
hir::ItemImpl(..) |
hir::ItemDefaultImpl(..) => {
span_bug!(item.span, "`build_constraints_for_item` invoked for non-type-def");
}
}
}
/// Load the generics for another item, adding a corresponding
/// relation into the dependencies to indicate that the variance
/// for `current` relies on `def_id`.
fn read_generics(&mut self, current: &CurrentItem, def_id: DefId) -> &'tcx ty::Generics {
let generics = self.tcx().generics_of(def_id);
if self.tcx().dep_graph.is_fully_enabled() {
self.dependencies.add(current.def_id, def_id);
}
generics
}
fn opt_inferred_index(&self, param_id: ast::NodeId) -> Option<&InferredIndex> {
self.terms_cx.inferred_map.get(&param_id)
}
fn find_binding_for_lifetime(&self, param_id: ast::NodeId) -> ast::NodeId {
let tcx = self.terms_cx.tcx;
assert!(is_lifetime(&tcx.hir, param_id));
@ -228,8 +278,27 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
// Parameter on an item defined within current crate:
// variance not yet inferred, so return a symbolic
// variance.
let InferredIndex(index) = self.inferred_index(param_node_id);
self.terms_cx.inferred_infos[index].term
if let Some(&InferredIndex(index)) = self.opt_inferred_index(param_node_id) {
self.terms_cx.inferred_infos[index].term
} else {
// If there is no inferred entry for a type parameter,
// it must be declared on a (locally defined) trait -- they don't
// get inferreds because they are always invariant.
if cfg!(debug_assertions) {
let item_node_id = self.tcx().hir.as_local_node_id(item_def_id).unwrap();
let item = self.tcx().hir.expect_item(item_node_id);
let success = match item.node {
hir::ItemTrait(..) => true,
_ => false,
};
if !success {
bug!("parameter {:?} has no inferred, but declared on non-trait: {:?}",
item_def_id,
item);
}
}
self.invariant
}
} else {
// Parameter on an item defined within another crate:
// variance already inferred, just look it up.
@ -279,7 +348,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
}
fn add_constraints_from_trait_ref(&mut self,
generics: &ty::Generics,
current: &CurrentItem,
trait_ref: ty::TraitRef<'tcx>,
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_trait_ref: trait_ref={:?} variance={:?}",
@ -288,12 +357,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
let trait_generics = self.tcx().generics_of(trait_ref.def_id);
// This edge is actually implied by the call to
// `trait_def`, but I'm trying to be future-proof. See
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(VarianceDepNode(trait_ref.def_id));
self.add_constraints_from_substs(generics,
self.add_constraints_from_substs(current,
trait_ref.def_id,
&trait_generics.types,
&trait_generics.regions,
@ -305,7 +369,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
/// in a context with the generics defined in `generics` and
/// ambient variance `variance`
fn add_constraints_from_ty(&mut self,
generics: &ty::Generics,
current: &CurrentItem,
ty: Ty<'tcx>,
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_ty(ty={:?}, variance={:?})",
@ -325,34 +389,29 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
ty::TyRef(region, ref mt) => {
let contra = self.contravariant(variance);
self.add_constraints_from_region(generics, region, contra);
self.add_constraints_from_mt(generics, mt, variance);
self.add_constraints_from_region(current, region, contra);
self.add_constraints_from_mt(current, mt, variance);
}
ty::TyArray(typ, _) |
ty::TySlice(typ) => {
self.add_constraints_from_ty(generics, typ, variance);
self.add_constraints_from_ty(current, typ, variance);
}
ty::TyRawPtr(ref mt) => {
self.add_constraints_from_mt(generics, mt, variance);
self.add_constraints_from_mt(current, mt, variance);
}
ty::TyTuple(subtys, _) => {
for &subty in subtys {
self.add_constraints_from_ty(generics, subty, variance);
self.add_constraints_from_ty(current, subty, variance);
}
}
ty::TyAdt(def, substs) => {
let adt_generics = self.tcx().generics_of(def.did);
let adt_generics = self.read_generics(current, def.did);
// This edge is actually implied by the call to
// `trait_def`, but I'm trying to be future-proof. See
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(VarianceDepNode(def.did));
self.add_constraints_from_substs(generics,
self.add_constraints_from_substs(current,
def.did,
&adt_generics.types,
&adt_generics.regions,
@ -364,12 +423,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
let trait_ref = &data.trait_ref;
let trait_generics = self.tcx().generics_of(trait_ref.def_id);
// This edge is actually implied by the call to
// `trait_def`, but I'm trying to be future-proof. See
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(VarianceDepNode(trait_ref.def_id));
self.add_constraints_from_substs(generics,
self.add_constraints_from_substs(current,
trait_ref.def_id,
&trait_generics.types,
&trait_generics.regions,
@ -380,25 +434,25 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
ty::TyDynamic(ref data, r) => {
// The type `Foo<T+'a>` is contravariant w/r/t `'a`:
let contra = self.contravariant(variance);
self.add_constraints_from_region(generics, r, contra);
self.add_constraints_from_region(current, r, contra);
if let Some(p) = data.principal() {
let poly_trait_ref = p.with_self_ty(self.tcx(), self.tcx().types.err);
self.add_constraints_from_trait_ref(generics, poly_trait_ref.0, variance);
self.add_constraints_from_trait_ref(current, poly_trait_ref.0, variance);
}
for projection in data.projection_bounds() {
self.add_constraints_from_ty(generics, projection.0.ty, self.invariant);
self.add_constraints_from_ty(current, projection.0.ty, self.invariant);
}
}
ty::TyParam(ref data) => {
assert_eq!(generics.parent, None);
assert_eq!(current.generics.parent, None);
let mut i = data.idx as usize;
if !generics.has_self || i > 0 {
i -= generics.regions.len();
if !current.generics.has_self || i > 0 {
i -= current.generics.regions.len();
}
let def_id = generics.types[i].def_id;
let def_id = current.generics.types[i].def_id;
let node_id = self.tcx().hir.as_local_node_id(def_id).unwrap();
match self.terms_cx.inferred_map.get(&node_id) {
Some(&index) => {
@ -414,7 +468,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
ty::TyFnDef(.., sig) |
ty::TyFnPtr(sig) => {
self.add_constraints_from_sig(generics, sig, variance);
self.add_constraints_from_sig(current, sig, variance);
}
ty::TyError => {
@ -433,7 +487,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
/// Adds constraints appropriate for a nominal type (enum, struct,
/// object, etc) appearing in a context with ambient variance `variance`
fn add_constraints_from_substs(&mut self,
generics: &ty::Generics,
current: &CurrentItem,
def_id: DefId,
type_param_defs: &[ty::TypeParameterDef],
region_param_defs: &[ty::RegionParameterDef],
@ -451,44 +505,44 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
debug!("add_constraints_from_substs: variance_decl={:?} variance_i={:?}",
variance_decl,
variance_i);
self.add_constraints_from_ty(generics, substs_ty, variance_i);
self.add_constraints_from_ty(current, substs_ty, variance_i);
}
for p in region_param_defs {
let variance_decl = self.declared_variance(p.def_id, def_id, p.index as usize);
let variance_i = self.xform(variance, variance_decl);
let substs_r = substs.region_for_def(p);
self.add_constraints_from_region(generics, substs_r, variance_i);
self.add_constraints_from_region(current, substs_r, variance_i);
}
}
/// Adds constraints appropriate for a function with signature
/// `sig` appearing in a context with ambient variance `variance`
fn add_constraints_from_sig(&mut self,
generics: &ty::Generics,
current: &CurrentItem,
sig: ty::PolyFnSig<'tcx>,
variance: VarianceTermPtr<'a>) {
let contra = self.contravariant(variance);
for &input in sig.0.inputs() {
self.add_constraints_from_ty(generics, input, contra);
self.add_constraints_from_ty(current, input, contra);
}
self.add_constraints_from_ty(generics, sig.0.output(), variance);
self.add_constraints_from_ty(current, sig.0.output(), variance);
}
/// Adds constraints appropriate for a region appearing in a
/// context with ambient variance `variance`
fn add_constraints_from_region(&mut self,
generics: &ty::Generics,
current: &CurrentItem,
region: ty::Region<'tcx>,
variance: VarianceTermPtr<'a>) {
match *region {
ty::ReEarlyBound(ref data) => {
assert_eq!(generics.parent, None);
let i = data.index as usize - generics.has_self as usize;
let def_id = generics.regions[i].def_id;
assert_eq!(current.generics.parent, None);
let i = data.index as usize - current.generics.has_self as usize;
let def_id = current.generics.regions[i].def_id;
let node_id = self.tcx().hir.as_local_node_id(def_id).unwrap();
if self.is_to_be_inferred(node_id) {
let index = self.inferred_index(node_id);
let &index = self.opt_inferred_index(node_id).unwrap();
self.add_constraint(index, variance);
}
}
@ -518,17 +572,17 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
/// Adds constraints appropriate for a mutability-type pair
/// appearing in a context with ambient variance `variance`
fn add_constraints_from_mt(&mut self,
generics: &ty::Generics,
current: &CurrentItem,
mt: &ty::TypeAndMut<'tcx>,
variance: VarianceTermPtr<'a>) {
match mt.mutbl {
hir::MutMutable => {
let invar = self.invariant(variance);
self.add_constraints_from_ty(generics, mt.ty, invar);
self.add_constraints_from_ty(current, mt.ty, invar);
}
hir::MutImmutable => {
self.add_constraints_from_ty(generics, mt.ty, variance);
self.add_constraints_from_ty(current, mt.ty, variance);
}
}
}


@ -12,7 +12,12 @@
//! parameters. See README.md for details.
use arena;
use rustc::ty::TyCtxt;
use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::{self, CrateVariancesMap, TyCtxt};
use rustc::ty::maps::Providers;
use std::rc::Rc;
/// Defines the `TermsContext`, which basically houses an arena where we
/// can allocate terms.
@ -24,13 +29,67 @@ mod constraints;
/// Code to solve constraints and write out the results.
mod solve;
/// Code to write unit tests of variance.
pub mod test;
/// Code for transforming variances.
mod xform;
pub fn infer_variance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
pub fn provide(providers: &mut Providers) {
*providers = Providers {
variances_of,
crate_variances,
..*providers
};
}
fn crate_variances<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum)
-> Rc<CrateVariancesMap> {
assert_eq!(crate_num, LOCAL_CRATE);
let mut arena = arena::TypedArena::new();
let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena);
let constraints_cx = constraints::add_constraints_from_crate(terms_cx);
solve::solve_constraints(constraints_cx);
tcx.variance_computed.set(true);
Rc::new(solve::solve_constraints(constraints_cx))
}
fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
-> Rc<Vec<ty::Variance>> {
let item_id = tcx.hir.as_local_node_id(item_def_id).expect("expected local def-id");
let item = tcx.hir.expect_item(item_id);
match item.node {
hir::ItemTrait(..) => {
// Traits are always invariant.
let generics = tcx.generics_of(item_def_id);
assert!(generics.parent.is_none());
Rc::new(vec![ty::Variance::Invariant; generics.count()])
}
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
// Everything else must be inferred.
// Lacking red/green, we read the variances for all items here
// but ignore the dependencies, then re-synthesize the ones we need.
let crate_map = tcx.dep_graph.with_ignore(|| tcx.crate_variances(LOCAL_CRATE));
tcx.dep_graph.read(DepNode::ItemVarianceConstraints(item_def_id));
for &dep_def_id in crate_map.dependencies.less_than(&item_def_id) {
if dep_def_id.is_local() {
tcx.dep_graph.read(DepNode::ItemVarianceConstraints(dep_def_id));
} else {
tcx.dep_graph.read(DepNode::ItemVariances(dep_def_id));
}
}
crate_map.variances.get(&item_def_id)
.unwrap_or(&crate_map.empty_variance)
.clone()
}
_ => {
// Variance not relevant.
span_bug!(item.span, "asked to compute variance for wrong kind of item")
}
}
}


@ -15,7 +15,9 @@
//! optimal solution to the constraints. The final variance for each
//! inferred is then written into the `variance_map` in the tcx.
use rustc::hir::def_id::DefId;
use rustc::ty;
use rustc_data_structures::fx::FxHashMap;
use std::rc::Rc;
use super::constraints::*;
@ -31,8 +33,8 @@ struct SolveContext<'a, 'tcx: 'a> {
solutions: Vec<ty::Variance>,
}
pub fn solve_constraints(constraints_cx: ConstraintContext) {
let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
pub fn solve_constraints(constraints_cx: ConstraintContext) -> ty::CrateVariancesMap {
let ConstraintContext { terms_cx, dependencies, constraints, .. } = constraints_cx;
let solutions = terms_cx.inferred_infos
.iter()
@ -45,7 +47,10 @@ pub fn solve_constraints(constraints_cx: ConstraintContext) {
solutions: solutions,
};
solutions_cx.solve();
solutions_cx.write();
let variances = solutions_cx.create_map();
let empty_variance = Rc::new(Vec::new());
ty::CrateVariancesMap { dependencies, variances, empty_variance }
}
impl<'a, 'tcx> SolveContext<'a, 'tcx> {
@ -83,7 +88,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> {
}
}
fn write(&self) {
fn create_map(&self) -> FxHashMap<DefId, Rc<Vec<ty::Variance>>> {
// Collect all the variances for a particular item and stick
// them into the variance map. We rely on the fact that we
// generate all the inferreds for a particular item
@ -95,11 +100,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> {
let tcx = self.terms_cx.tcx;
// Ignore the writes here because the relevant edges were
// already accounted for in `constraints.rs`. See the section
// on dependency graph management in README.md for more
// information.
let _ignore = tcx.dep_graph.in_ignore();
let mut map = FxHashMap();
let solutions = &self.solutions;
let inferred_infos = &self.terms_cx.inferred_infos;
@ -127,19 +128,10 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> {
let item_def_id = tcx.hir.local_def_id(item_id);
// For unit testing: check for a special "rustc_variance"
// attribute and report an error with various results if found.
if tcx.has_attr(item_def_id, "rustc_variance") {
span_err!(tcx.sess,
tcx.hir.span(item_id),
E0208,
"{:?}",
item_variances);
}
tcx.maps.variances_of.borrow_mut()
.insert(item_def_id, Rc::new(item_variances));
map.insert(item_def_id, Rc::new(item_variances));
}
map
}
fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance {


@ -32,8 +32,6 @@ use self::VarianceTerm::*;
pub type VarianceTermPtr<'a> = &'a VarianceTerm<'a>;
use dep_graph::DepNode::ItemSignature as VarianceDepNode;
#[derive(Copy, Clone, Debug)]
pub struct InferredIndex(pub usize);
@ -109,7 +107,7 @@ pub fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>
};
// See README.md for a discussion on dep-graph management.
tcx.visit_all_item_likes_in_krate(|def_id| VarianceDepNode(def_id), &mut terms_cx);
tcx.hir.krate().visit_all_item_likes(&mut terms_cx);
terms_cx
}
@ -139,7 +137,6 @@ fn lang_items(tcx: TyCtxt) -> Vec<(ast::NodeId, Vec<ty::Variance>)> {
impl<'a, 'tcx> TermsContext<'a, 'tcx> {
fn add_inferreds_for_item(&mut self,
item_id: ast::NodeId,
has_self: bool,
generics: &hir::Generics) {
//! Add "inferreds" for the generic parameters declared on this
//! item. This has a lot of annoying parameters because we are
@ -149,38 +146,17 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> {
//!
// NB: In the code below for writing the results back into the
// tcx, we rely on the fact that all inferreds for a particular
// item are assigned continuous indices.
// `CrateVariancesMap`, we rely on the fact that all inferreds
// for a particular item are assigned continuous indices.
let inferreds_on_entry = self.num_inferred();
if has_self {
self.add_inferred(item_id, 0, item_id);
}
for (i, p) in generics.lifetimes.iter().enumerate() {
for (p, i) in generics.lifetimes.iter().zip(0..) {
let id = p.lifetime.id;
let i = has_self as usize + i;
self.add_inferred(item_id, i, id);
}
for (i, p) in generics.ty_params.iter().enumerate() {
let i = has_self as usize + generics.lifetimes.len() + i;
for (p, i) in generics.ty_params.iter().zip(generics.lifetimes.len()..) {
self.add_inferred(item_id, i, p.id);
}
// If this item has no type or lifetime parameters,
// then there are no variances to infer, so just
// insert an empty entry into the variance map.
// Arguably we could just leave the map empty in this
// case but it seems cleaner to be able to distinguish
// "invalid item id" from "item id with no
// parameters".
if self.num_inferred() == inferreds_on_entry {
let item_def_id = self.tcx.hir.local_def_id(item_id);
self.tcx.maps.variances_of.borrow_mut()
.insert(item_def_id, self.empty_variances.clone());
}
}
fn add_inferred(&mut self, item_id: ast::NodeId, index: usize, param_id: ast::NodeId) {
@ -232,15 +208,10 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> {
hir::ItemEnum(_, ref generics) |
hir::ItemStruct(_, ref generics) |
hir::ItemUnion(_, ref generics) => {
self.add_inferreds_for_item(item.id, false, generics);
}
hir::ItemTrait(_, ref generics, ..) => {
// Note: all inputs for traits are ultimately
// constrained to be invariant. See `visit_item` in
// the impl for `ConstraintContext` in `constraints.rs`.
self.add_inferreds_for_item(item.id, true, generics);
self.add_inferreds_for_item(item.id, generics);
}
hir::ItemTrait(..) |
hir::ItemExternCrate(_) |
hir::ItemUse(..) |
hir::ItemDefaultImpl(..) |


@ -0,0 +1,41 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ty::TyCtxt;
pub fn test_variance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
tcx.hir.krate().visit_all_item_likes(&mut VarianceTest { tcx });
}
struct VarianceTest<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>
}
impl<'a, 'tcx> ItemLikeVisitor<'tcx> for VarianceTest<'a, 'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
let item_def_id = self.tcx.hir.local_def_id(item.id);
// For unit testing: check for a special "rustc_variance"
// attribute and report an error with various results if found.
if self.tcx.has_attr(item_def_id, "rustc_variance") {
let variances_of = self.tcx.variances_of(item_def_id);
span_err!(self.tcx.sess,
item.span,
E0208,
"{:?}",
variances_of);
}
}
fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) { }
fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) { }
}
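For orientation, a unit test driven by this visitor looks roughly like the sketch below. The `[-, +]` string is an assumed rendering of the variance vector ('-' for the contravariant region, '+' for the covariant type parameter, following the constraint rules shown earlier in this diff), not output taken from this commit:

// Illustrative compile-fail test exercising the E0208 path above.
#![feature(rustc_attrs)]

#[rustc_variance]
struct Contra<'a, T> { //~ ERROR [-, +]
    x: &'a T,
}

fn main() {}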


@ -104,7 +104,8 @@ pub fn run_core(search_paths: SearchPaths,
externs: config::Externs,
input: Input,
triple: Option<String>,
maybe_sysroot: Option<PathBuf>) -> (clean::Crate, RenderInfo)
maybe_sysroot: Option<PathBuf>,
allow_warnings: bool) -> (clean::Crate, RenderInfo)
{
// Parse, resolve, and typecheck the given crate.
@ -119,7 +120,7 @@ pub fn run_core(search_paths: SearchPaths,
maybe_sysroot: maybe_sysroot,
search_paths: search_paths,
crate_types: vec![config::CrateTypeRlib],
lint_opts: vec![(warning_lint, lint::Allow)],
lint_opts: if !allow_warnings { vec![(warning_lint, lint::Allow)] } else { vec![] },
lint_cap: Some(lint::Allow),
externs: externs,
target_triple: triple.unwrap_or(config::host_triple().to_string()),


@ -137,7 +137,6 @@ r##"<!DOCTYPE html>
window.rootPath = "{root_path}";
window.currentCrate = "{krate}";
</script>
<script src="{root_path}jquery.js"></script>
<script src="{root_path}main.js"></script>
<script defer src="{root_path}search-index.js"></script>
</body>


@ -660,8 +660,6 @@ fn write_shared(cx: &Context,
// Add all the static files. These may already exist, but we just
// overwrite them anyway to make sure that they're fresh and up-to-date.
write(cx.dst.join("jquery.js"),
include_bytes!("static/jquery-2.1.4.min.js"))?;
write(cx.dst.join("main.js"),
include_bytes!("static/main.js"))?;
write(cx.dst.join("rustdoc.css"),


@ -27,11 +27,6 @@ included, and carry their own copyright notices and license terms:
Licensed under the SIL Open Font License, Version 1.1.
See Heuristica-LICENSE.txt.
* jQuery (jquery-2.1.4.min.js):
Copyright 2005, 2015 jQuery Foundation, Inc.
Licensed under the MIT license (see LICENSE-MIT.txt).
* rustdoc.css, main.js, and playpen.js:
Copyright 2015 The Rust Developers.



@ -37,10 +37,63 @@
"associatedconstant",
"union"];
function hasClass(elem, className) {
if (elem && className && elem.className) {
var elemClass = elem.className;
var start = elemClass.indexOf(className);
if (start == -1) {
return false;
} else if (elemClass.length == className.length) {
return true;
} else {
if (start > 0 && elemClass[start - 1] != ' ') {
return false;
}
var end = start + className.length;
if (end < elemClass.length && elemClass[end] != ' ') {
return false;
}
return true;
}
}
return false;
}
function addClass(elem, className) {
if (elem && className && !hasClass(elem, className)) {
if (elem.className && elem.className.length > 0) {
elem.className += ' ' + className;
} else {
elem.className = className;
}
}
}
function removeClass(elem, className) {
if (elem && className && elem.className) {
elem.className = (" " + elem.className + " ").replace(" " + className + " ", " ")
.trim();
}
}
function onEach(arr, func) {
if (arr && arr.length > 0 && func) {
for (var i = 0; i < arr.length; i++) {
func(arr[i]);
}
}
}
function isHidden(elem) {
return (elem.offsetParent === null)
}
// used for special search precedence
var TY_PRIMITIVE = itemTypes.indexOf("primitive");
$('.js-only').removeClass('js-only');
onEach(document.getElementsByClassName('js-only'), function(e) {
removeClass(e, 'js-only');
});
function getQueryStringParams() {
var params = {};
@ -65,18 +118,28 @@
from = parseInt(match[1], 10);
to = Math.min(50000, parseInt(match[2] || match[1], 10));
from = Math.min(from, to);
if ($('#' + from).length === 0) {
var elem = document.getElementById(from);
if (!elem) {
return;
}
if (ev === null) { $('#' + from)[0].scrollIntoView(); };
$('.line-numbers span').removeClass('line-highlighted');
if (ev === null) {
var x = document.getElementById(from);
if (x) {
x.scrollIntoView();
}
};
onEach(document.getElementsByClassName('line-numbers'), function(e) {
onEach(e.getElementsByTagName('span'), function(i_e) {
removeClass(i_e, 'line-highlighted');
});
})
for (i = from; i <= to; ++i) {
$('#' + i).addClass('line-highlighted');
addClass(document.getElementById(i), 'line-highlighted');
}
}
}
highlightSourceLines(null);
$(window).on('hashchange', highlightSourceLines);
window.onhashchange = highlightSourceLines;
// Gets the human-readable string for the virtual-key code of the
// given KeyboardEvent, ev.
@ -99,23 +162,25 @@
}
function handleShortcut(ev) {
if (document.activeElement.tagName == "INPUT")
if (document.activeElement.tagName === "INPUT")
return;
// Don't interfere with browser shortcuts
if (ev.ctrlKey || ev.altKey || ev.metaKey)
return;
var help = document.getElementById("help");
switch (getVirtualKey(ev)) {
case "Escape":
if (!$("#help").hasClass("hidden")) {
var search = document.getElementById("search");
if (!hasClass(help, "hidden")) {
ev.preventDefault();
$("#help").addClass("hidden");
$("body").removeClass("blur");
} else if (!$("#search").hasClass("hidden")) {
addClass(help, "hidden");
removeClass(document.body, "blur");
} else if (!hasClass(search, "hidden")) {
ev.preventDefault();
$("#search").addClass("hidden");
$("#main").removeClass("hidden");
addClass(search, "hidden");
removeClass(document.getElementById("main"), "hidden");
}
break;
@ -131,42 +196,76 @@
break;
case "?":
if (ev.shiftKey && $("#help").hasClass("hidden")) {
if (ev.shiftKey && hasClass(help, "hidden")) {
ev.preventDefault();
$("#help").removeClass("hidden");
$("body").addClass("blur");
removeClass(help, "hidden");
addClass(document.body, "blur");
}
break;
}
}
$(document).on("keypress", handleShortcut);
$(document).on("keydown", handleShortcut);
$(document).on("click", function(ev) {
if (!$(ev.target).closest("#help > div").length) {
$("#help").addClass("hidden");
$("body").removeClass("blur");
}
});
document.onkeypress = handleShortcut;
document.onkeydown = handleShortcut;
document.onclick = function(ev) {
if (hasClass(ev.target, 'collapse-toggle')) {
collapseDocs(ev.target);
} else if (hasClass(ev.target.parentNode, 'collapse-toggle')) {
collapseDocs(ev.target.parentNode);
} else if (ev.target.tagName === 'SPAN' && hasClass(ev.target.parentNode, 'line-numbers')) {
var prev_id = 0;
$('.version-selector').on('change', function() {
var i, match,
url = document.location.href,
stripped = '',
len = rootPath.match(/\.\.\//g).length + 1;
for (i = 0; i < len; ++i) {
match = url.match(/\/[^\/]*$/);
if (i < len - 1) {
stripped = match[0] + stripped;
function set_fragment(name) {
if (browserSupportsHistoryApi()) {
history.replaceState(null, null, '#' + name);
window.onhashchange();
} else {
location.replace('#' + name);
}
}
url = url.substring(0, url.length - match[0].length);
var cur_id = parseInt(ev.target.id, 10);
if (ev.shiftKey && prev_id) {
if (prev_id > cur_id) {
var tmp = prev_id;
prev_id = cur_id;
cur_id = tmp;
}
set_fragment(prev_id + '-' + cur_id);
} else {
prev_id = cur_id;
set_fragment(cur_id);
}
} else if (!hasClass(document.getElementById("help"), "hidden")) {
addClass(document.getElementById("help"), "hidden");
removeClass(document.body, "blur");
}
};
url += '/' + $('.version-selector').val() + stripped;
var x = document.getElementsByClassName('version-selector');
if (x.length > 0) {
x[0].onchange = function() {
var i, match,
url = document.location.href,
stripped = '',
len = rootPath.match(/\.\.\//g).length + 1;
document.location.href = url;
});
for (i = 0; i < len; ++i) {
match = url.match(/\/[^\/]*$/);
if (i < len - 1) {
stripped = match[0] + stripped;
}
url = url.substring(0, url.length - match[0].length);
}
url += '/' + document.getElementsByClassName('version-selector')[0].value + stripped;
document.location.href = url;
};
}
/**
* A function to compute the Levenshtein distance between two strings
@ -214,8 +313,8 @@
// but only if the input bar is empty. This avoids the obnoxious issue
// where you start trying to do a search, and the index loads, and
// suddenly your search is gone!
if ($(".search-input")[0].value === "") {
$(".search-input")[0].value = params.search || '';
if (document.getElementsByClassName("search-input")[0].value === "") {
document.getElementsByClassName("search-input")[0].value = params.search || '';
}
/**
@ -484,7 +583,8 @@
}
function getQuery() {
var matches, type, query, raw = $('.search-input').val();
var matches, type, query, raw =
document.getElementsByClassName('search-input')[0].value;
query = raw;
matches = query.match(/^(fn|mod|struct|enum|trait|type|const|macro)\s*:\s*/i);
@ -502,54 +602,92 @@
}
function initSearchNav() {
var hoverTimeout, $results = $('.search-results .result');
var hoverTimeout;
$results.on('click', function() {
var dst = $(this).find('a')[0];
var click_func = function(e) {
var el = e.target;
// to retrieve the real "owner" of the event.
while (el.tagName !== 'TR') {
el = el.parentNode;
}
var dst = e.target.getElementsByTagName('a');
if (dst.length < 1) {
return;
}
dst = dst[0];
if (window.location.pathname === dst.pathname) {
$('#search').addClass('hidden');
$('#main').removeClass('hidden');
addClass(document.getElementById('search'), 'hidden');
removeClass(document.getElementById('main'), 'hidden');
document.location.href = dst.href;
}
}).on('mouseover', function() {
var $el = $(this);
};
var mouseover_func = function(e) {
var el = e.target;
// to retrieve the real "owner" of the event.
while (el.tagName !== 'TR') {
el = el.parentNode;
}
clearTimeout(hoverTimeout);
hoverTimeout = setTimeout(function() {
$results.removeClass('highlighted');
$el.addClass('highlighted');
onEach(document.getElementsByClassName('search-results'), function(e) {
onEach(e.getElementsByClassName('result'), function(i_e) {
removeClass(i_e, 'highlighted');
});
});
addClass(el, 'highlighted');
}, 20);
};
onEach(document.getElementsByClassName('search-results'), function(e) {
onEach(e.getElementsByClassName('result'), function(i_e) {
i_e.onclick = click_func;
i_e.onmouseover = mouseover_func;
});
});
$(document).off('keydown.searchnav');
$(document).on('keydown.searchnav', function(e) {
var $active = $results.filter('.highlighted');
var search_input = document.getElementsByClassName('search-input')[0];
search_input.onkeydown = null;
search_input.onkeydown = function(e) {
var actives = [];
onEach(document.getElementsByClassName('search-results'), function(e) {
onEach(e.getElementsByClassName('highlighted'), function(i_e) {
actives.push(i_e);
});
});
if (e.which === 38) { // up
if (!$active.length || !$active.prev()) {
if (!actives.length || !actives[0].previousElementSibling) {
return;
}
$active.prev().addClass('highlighted');
$active.removeClass('highlighted');
addClass(actives[0].previousElementSibling, 'highlighted');
removeClass(actives[0], 'highlighted');
} else if (e.which === 40) { // down
if (!$active.length) {
$results.first().addClass('highlighted');
} else if ($active.next().length) {
$active.next().addClass('highlighted');
$active.removeClass('highlighted');
if (!actives.length) {
var results = document.getElementsByClassName('search-results');
if (results.length > 0) {
var res = results[0].getElementsByClassName('result');
if (res.length > 0) {
addClass(res[0], 'highlighted');
}
}
} else if (actives[0].nextElementSibling) {
addClass(actives[0].nextElementSibling, 'highlighted');
removeClass(actives[0], 'highlighted');
}
} else if (e.which === 13) { // return
if ($active.length) {
document.location.href = $active.find('a').prop('href');
if (actives.length) {
document.location.href = actives[0].getElementsByTagName('a')[0].href;
}
} else {
$active.removeClass('highlighted');
} else if (actives.length > 0) {
removeClass(actives[0], 'highlighted');
}
});
};
}
function escape(content) {
return $('<h1/>').text(content).html();
let h1 = document.createElement('h1');
h1.textContent = content;
return h1.innerHTML;
}
function showResults(results) {
@ -619,10 +757,19 @@
}
output += "</p>";
$('#main.content').addClass('hidden');
$('#search.content').removeClass('hidden').html(output);
$('#search .desc').width($('#search').width() - 40 -
$('#search td:first-child').first().width());
addClass(document.getElementById('main'), 'hidden');
var search = document.getElementById('search');
removeClass(search, 'hidden');
search.innerHTML = output;
var tds = search.getElementsByTagName('td');
var td_width = 0;
if (tds.length > 0) {
td_width = tds[0].offsetWidth;
}
var width = search.offsetWidth - 40 - td_width;
onEach(search.getElementsByClassName('desc'), function(e) {
e.style.width = width + 'px';
});
initSearchNav();
}
@ -645,17 +792,15 @@
}
// Update document title to maintain a meaningful browser history
$(document).prop("title", "Results for " + query.query + " - Rust");
document.title = "Results for " + query.query + " - Rust";
// Because searching is incremental by character, only the most
// recent search query is added to the browser history.
if (browserSupportsHistoryApi()) {
if (!history.state && !params.search) {
history.pushState(query, "", "?search=" +
encodeURIComponent(query.raw));
history.pushState(query, "", "?search=" + encodeURIComponent(query.raw));
} else {
history.replaceState(query, "", "?search=" +
encodeURIComponent(query.raw));
history.replaceState(query, "", "?search=" + encodeURIComponent(query.raw));
}
}
@ -744,49 +889,68 @@
function startSearch() {
var searchTimeout;
$(".search-input").on("keyup input",function() {
var callback = function() {
var search_input = document.getElementsByClassName('search-input');
if (search_input.length < 1) { return; }
search_input = search_input[0];
clearTimeout(searchTimeout);
if ($(this).val().length === 0) {
if (search_input.value.length === 0) {
if (browserSupportsHistoryApi()) {
history.replaceState("", "std - Rust", "?search=");
}
$('#main.content').removeClass('hidden');
$('#search.content').addClass('hidden');
var main = document.getElementById('main');
if (hasClass(main, 'content')) {
removeClass(main, 'hidden');
}
var search_c = document.getElementById('search');
if (hasClass(search_c, 'content')) {
addClass(search_c, 'hidden');
}
} else {
searchTimeout = setTimeout(search, 500);
}
});
$('.search-form').on('submit', function(e){
};
var search_input = document.getElementsByClassName("search-input")[0];
search_input.onkeyup = callback;
search_input.oninput = callback;
document.getElementsByClassName("search-form")[0].onsubmit = function(e){
e.preventDefault();
clearTimeout(searchTimeout);
search();
});
$('.search-input').on('change paste', function(e) {
};
search_input.onchange = function(e) {
// Do NOT e.preventDefault() here. It will prevent pasting.
clearTimeout(searchTimeout);
// zero-timeout necessary here because at the time of event handler execution the
// pasted content is not in the input field yet. Shouldn't make any difference for
// change, though.
setTimeout(search, 0);
});
};
search_input.onpaste = search_input.onchange;
// Push and pop states are used to add search results to the browser
// history.
if (browserSupportsHistoryApi()) {
// Store the previous <title> so we can revert back to it later.
var previousTitle = $(document).prop("title");
var previousTitle = document.title;
$(window).on('popstate', function(e) {
window.onpopstate = function(e) {
var params = getQueryStringParams();
// When browsing back from search results the main page
// visibility must be reset.
if (!params.search) {
$('#main.content').removeClass('hidden');
$('#search.content').addClass('hidden');
var main = document.getElementById('main');
if (hasClass(main, 'content')) {
removeClass(main, 'hidden');
}
var search = document.getElementById('search');
if (hasClass(search, 'content')) {
addClass(search, 'hidden');
}
}
// Revert to the previous title manually since the History
// API ignores the title parameter.
$(document).prop("title", previousTitle);
document.title = previousTitle;
// When browsing forward to search results the previous
// search will be repeated, so the currentResults are
// cleared to ensure the search is successful.
@ -795,14 +959,14 @@
// perform the search. This will empty the bar if there's
// nothing there, which lets you really go back to a
// previous state with nothing in the bar.
$('.search-input').val(params.search);
document.getElementsByClassName('search-input')[0].value = params.search;
// Some browsers fire 'onpopstate' for every page load
// (Chrome), while others fire the event only when actually
// popping a state (Firefox), which is why search() is
// called both here and at the end of the startSearch()
// function.
search();
});
};
}
search();
}
@ -812,10 +976,12 @@
// Draw a convenient sidebar of known crates if we have a listing
if (rootPath === '../') {
var sidebar = $('.sidebar');
var div = $('<div>').attr('class', 'block crate');
div.append($('<h3>').text('Crates'));
var ul = $('<ul>').appendTo(div);
var sidebar = document.getElementsByClassName('sidebar')[0];
var div = document.createElement('div');
div.className = 'block crate';
div.innerHTML = '<h3>Crates</h3>';
var ul = document.createElement('ul');
div.appendChild(ul);
var crates = [];
for (var crate in rawSearchIndex) {
@ -828,12 +994,17 @@
if (crates[i] === window.currentCrate) {
klass += ' current';
}
var link = $('<a>', {'href': '../' + crates[i] + '/index.html',
'title': rawSearchIndex[crates[i]].doc,
'class': klass}).text(crates[i]);
ul.append($('<li>').append(link));
var link = document.createElement('a');
link.href = '../' + crates[i] + '/index.html';
link.title = rawSearchIndex[crates[i]].doc;
link.className = klass;
link.textContent = crates[i];
var li = document.createElement('li');
li.appendChild(link);
ul.appendChild(li);
}
sidebar.append(div);
sidebar.appendChild(div);
}
}
@ -841,16 +1012,19 @@
// delayed sidebar rendering.
function initSidebarItems(items) {
var sidebar = $('.sidebar');
var sidebar = document.getElementsByClassName('sidebar')[0];
var current = window.sidebarCurrent;
function block(shortty, longty) {
var filtered = items[shortty];
if (!filtered) { return; }
var div = $('<div>').attr('class', 'block ' + shortty);
div.append($('<h3>').text(longty));
var ul = $('<ul>').appendTo(div);
var div = document.createElement('div');
div.className = 'block ' + shortty;
var h3 = document.createElement('h3');
h3.textContent = longty;
div.appendChild(h3);
var ul = document.createElement('ul');
for (var i = 0; i < filtered.length; ++i) {
var item = filtered[i];
@ -867,12 +1041,17 @@
} else {
path = shortty + '.' + name + '.html';
}
var link = $('<a>', {'href': current.relpath + path,
'title': desc,
'class': klass}).text(name);
ul.append($('<li>').append(link));
var link = document.createElement('a');
link.href = current.relpath + path;
link.title = desc;
link.className = klass;
link.textContent = name;
var li = document.createElement('li');
li.appendChild(link);
ul.appendChild(li);
}
sidebar.append(div);
div.appendChild(ul);
sidebar.appendChild(div);
}
block("primitive", "Primitive Types");
@ -890,21 +1069,25 @@
window.initSidebarItems = initSidebarItems;
window.register_implementors = function(imp) {
var list = $('#implementors-list');
var list = document.getElementById('implementors-list');
var libs = Object.getOwnPropertyNames(imp);
for (var i = 0; i < libs.length; ++i) {
if (libs[i] === currentCrate) { continue; }
var structs = imp[libs[i]];
for (var j = 0; j < structs.length; ++j) {
var code = $('<code>').append(structs[j]);
$.each(code.find('a'), function(idx, a) {
var href = $(a).attr('href');
var code = document.createElement('code');
code.innerHTML = structs[j];
var x = code.getElementsByTagName('a');
for (var i = 0; i < x.length; i++) {
var href = x[i].href;
if (href && href.indexOf('http') !== 0) {
$(a).attr('href', rootPath + href);
x[i].href = rootPath + href;
}
});
var li = $('<li>').append(code);
list.append(li);
}
var li = document.createElement('li');
li.appendChild(code);
list.appendChild(li);
}
}
};
@ -922,146 +1105,186 @@
return "\u2212"; // "\u2212" is '' minus sign
}
function toggleAllDocs() {
var toggle = $("#toggle-all-docs");
if (toggle.hasClass("will-expand")) {
toggle.removeClass("will-expand");
toggle.children(".inner").text(labelForToggleButton(false));
toggle.attr("title", "collapse all docs");
$(".docblock").show();
$(".toggle-label").hide();
$(".toggle-wrapper").removeClass("collapsed");
$(".collapse-toggle").children(".inner").text(labelForToggleButton(false));
} else {
toggle.addClass("will-expand");
toggle.children(".inner").text(labelForToggleButton(true));
toggle.attr("title", "expand all docs");
$(".docblock").hide();
$(".toggle-label").show();
$(".toggle-wrapper").addClass("collapsed");
$(".collapse-toggle").children(".inner").text(labelForToggleButton(true));
}
}
function collapseDocs(toggle, animate) {
var relatedDoc = toggle.parent().next();
if (relatedDoc.is(".stability")) {
relatedDoc = relatedDoc.next();
}
if (relatedDoc.is(".docblock")) {
if (relatedDoc.is(":visible")) {
if (animate === true) {
relatedDoc.slideUp({
duration: 'fast',
easing: 'linear',
complete: function() {
toggle.children(".toggle-label").fadeIn();
toggle.parent(".toggle-wrapper").addClass("collapsed");
toggle.children(".inner").text(labelForToggleButton(true));
},
});
function onEveryMatchingChild(elem, className, func) {
if (elem && className && func) {
for (var i = 0; i < elem.childNodes.length; i++) {
if (hasClass(elem.childNodes[i], className)) {
func(elem.childNodes[i]);
} else {
relatedDoc.hide();
toggle.children(".toggle-label").show();
toggle.parent(".toggle-wrapper").addClass("collapsed");
toggle.children(".inner").text(labelForToggleButton(true));
onEveryMatchingChild(elem.childNodes[i], className, func);
}
} else {
relatedDoc.slideDown({duration: 'fast', easing: 'linear'});
toggle.parent(".toggle-wrapper").removeClass("collapsed");
toggle.children(".inner").text(labelForToggleButton(false));
toggle.children(".toggle-label").hide();
}
}
}
$("#toggle-all-docs").on("click", toggleAllDocs);
function toggleAllDocs() {
var toggle = document.getElementById("toggle-all-docs");
if (hasClass(toggle, "will-expand")) {
removeClass(toggle, "will-expand");
onEveryMatchingChild(toggle, "inner", function(e) {
e.innerHTML = labelForToggleButton(false);
});
toggle.title = "collapse all docs";
onEach(document.getElementsByClassName("docblock"), function(e) {
e.style.display = 'block';
});
onEach(document.getElementsByClassName("toggle-label"), function(e) {
e.style.display = 'none';
});
onEach(document.getElementsByClassName("toggle-wrapper"), function(e) {
removeClass(e, "collapsed");
});
onEach(document.getElementsByClassName("collapse-toggle"), function(e) {
onEveryMatchingChild(e, "inner", function(i_e) {
i_e.innerHTML = labelForToggleButton(false);
});
});
} else {
addClass(toggle, "will-expand");
onEveryMatchingChild(toggle, "inner", function(e) {
e.innerHTML = labelForToggleButton(true);
});
toggle.title = "expand all docs";
onEach(document.getElementsByClassName("docblock"), function(e) {
e.style.display = 'none';
});
onEach(document.getElementsByClassName("toggle-label"), function(e) {
e.style.display = 'inline-block';
});
onEach(document.getElementsByClassName("toggle-wrapper"), function(e) {
addClass(e, "collapsed");
});
onEach(document.getElementsByClassName("collapse-toggle"), function(e) {
onEveryMatchingChild(e, "inner", function(i_e) {
i_e.innerHTML = labelForToggleButton(true);
});
});
}
}
$(document).on("click", ".collapse-toggle", function() {
collapseDocs($(this), true)
});
$(function() {
var toggle = $("<a/>", {'href': 'javascript:void(0)', 'class': 'collapse-toggle'})
.html("[<span class='inner'></span>]");
toggle.children(".inner").text(labelForToggleButton(false));
$(".method, .impl-items > .associatedconstant").each(function() {
if ($(this).next().is(".docblock") ||
($(this).next().is(".stability") && $(this).next().next().is(".docblock"))) {
$(this).children().last().after(toggle.clone());
}
});
var mainToggle =
$(toggle.clone()).append(
$('<span/>', {'class': 'toggle-label'})
.css('display', 'none')
.html('&nbsp;Expand&nbsp;description'));
var wrapper = $("<div class='toggle-wrapper'>").append(mainToggle);
$("#main > .docblock").before(wrapper);
$(".docblock.autohide").each(function() {
var wrap = $(this).prev();
if (wrap.is(".toggle-wrapper")) {
var toggle = wrap.children().first();
if ($(this).children().first().is("h3")) {
toggle.children(".toggle-label")
.text(" Show " + $(this).children().first().text());
}
$(this).hide();
wrap.addClass("collapsed");
toggle.children(".inner").text(labelForToggleButton(true));
toggle.children(".toggle-label").show();
}
});
var mainToggle =
$(toggle).append(
$('<span/>', {'class': 'toggle-label'})
.css('display', 'none')
.html('&nbsp;Expand&nbsp;attributes'));
var wrapper = $("<div class='toggle-wrapper toggle-attributes'>").append(mainToggle);
$("#main > pre > .attributes").each(function() {
$(this).before(wrapper);
collapseDocs($($(this).prev().children()[0]), false);
});
});
$('pre.line-numbers').on('click', 'span', function() {
var prev_id = 0;
function set_fragment(name) {
if (browserSupportsHistoryApi()) {
history.replaceState(null, null, '#' + name);
$(window).trigger('hashchange');
function collapseDocs(toggle) {
if (!toggle || !toggle.parentNode) {
return;
}
var relatedDoc = toggle.parentNode.nextElementSibling;
if (hasClass(relatedDoc, "stability")) {
relatedDoc = relatedDoc.nextElementSibling;
}
if (hasClass(relatedDoc, "docblock")) {
if (!isHidden(relatedDoc)) {
relatedDoc.style.display = 'none';
onEach(toggle.childNodes, function(e) {
if (hasClass(e, 'toggle-label')) {
e.style.display = 'inline-block';
}
if (hasClass(e, 'inner')) {
e.innerHTML = labelForToggleButton(true);
}
});
addClass(toggle.parentNode, 'collapsed');
} else {
location.replace('#' + name);
relatedDoc.style.display = 'block';
removeClass(toggle.parentNode, 'collapsed');
onEach(toggle.childNodes, function(e) {
if (hasClass(e, 'toggle-label')) {
e.style.display = 'none';
}
if (hasClass(e, 'inner')) {
e.innerHTML = labelForToggleButton(false);
}
});
}
}
}
return function(ev) {
var cur_id = parseInt(ev.target.id, 10);
var x = document.getElementById('toggle-all-docs');
if (x) {
x.onclick = toggleAllDocs;
}
if (ev.shiftKey && prev_id) {
if (prev_id > cur_id) {
var tmp = prev_id;
prev_id = cur_id;
cur_id = tmp;
function insertAfter(newNode, referenceNode) {
referenceNode.parentNode.insertBefore(newNode, referenceNode.nextSibling);
}
var toggle = document.createElement('a');
toggle.href = 'javascript:void(0)';
toggle.className = 'collapse-toggle';
toggle.innerHTML = "[<span class='inner'>"+labelForToggleButton(false)+"</span>]";
var func = function(e) {
var next = e.nextElementSibling;
if (!next) {
return;
}
if (hasClass(next, 'docblock') ||
(hasClass(next, 'stability') &&
hasClass(next.nextElementSibling, 'docblock'))) {
insertAfter(toggle.cloneNode(true), e.childNodes[e.childNodes.length - 1]);
}
}
onEach(document.getElementsByClassName('method'), func);
onEach(document.getElementsByClassName('impl-items'), function(e) {
onEach(e.getElementsByClassName('associatedconstant'), func);
});
var span = document.createElement('span');
span.className = 'toggle-label';
span.style.display = 'none';
span.innerHTML = '&nbsp;Expand&nbsp;description';
var mainToggle = toggle.cloneNode(true);
mainToggle.appendChild(span);
var wrapper = document.createElement('div');
wrapper.className = 'toggle-wrapper';
wrapper.appendChild(mainToggle);
onEach(document.getElementById('main').getElementsByClassName('docblock'), function(e) {
if (e.parentNode.id === "main") {
e.parentNode.insertBefore(wrapper, e);
}
});
onEach(document.getElementsByClassName('docblock'), function(e) {
if (hasClass(e, 'autohide')) {
var wrap = e.previousElementSibling;
if (wrap && hasClass(wrap, 'toggle-wrapper')) {
var toggle = wrap.childNodes[0];
if (e.childNodes[0].tagName === 'H3') {
onEach(toggle.getElementsByClassName('toggle-label'), function(i_e) {
i_e.innerHTML = " Show " + e.childNodes[0].innerHTML;
});
}
set_fragment(prev_id + '-' + cur_id);
} else {
prev_id = cur_id;
set_fragment(cur_id);
e.style.display = 'none';
addClass(wrap, 'collapsed');
onEach(toggle.getElementsByClassName('inner'), function(e) {
e.innerHTML = labelForToggleButton(true);
});
onEach(toggle.getElementsByClassName('toggle-label'), function(e) {
e.style.display = 'block';
});
}
};
}());
}
})
var span = document.createElement('span');
span.className = 'toggle-label';
span.style.display = 'none';
span.innerHTML = '&nbsp;Expand&nbsp;attributes';
toggle.appendChild(span);
var wrapper = document.createElement('div');
wrapper.className = 'toggle-wrapper toggle-attributes';
wrapper.appendChild(toggle);
onEach(document.getElementById('main').getElementsByTagName('pre'), function(e) {
onEach(e.getElementsByClassName('attributes'), function(i_e) {
i_e.parentNode.insertBefore(wrapper, i_e);
collapseDocs(i_e.previousSibling.childNodes[0]);
});
});
}());
// Sets the focus on the search bar at the top of the page
function focusSearchBar() {
$('.search-input').focus();
document.getElementsByClassName('search-input')[0].focus();
}


@ -173,6 +173,7 @@ pub fn opts() -> Vec<RustcOptGroup> {
or `#![doc(html_playground_url=...)]`",
"URL")),
unstable(optflag("", "enable-commonmark", "to enable commonmark doc rendering/testing")),
unstable(optflag("", "display-warnings", "to print code warnings when testing doc")),
]
}
@ -280,14 +281,16 @@ pub fn main_args(args: &[String]) -> isize {
let crate_name = matches.opt_str("crate-name");
let playground_url = matches.opt_str("playground-url");
let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from);
let display_warnings = matches.opt_present("display-warnings");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, cfgs, libs, externs, test_args, maybe_sysroot, render_type)
return markdown::test(input, cfgs, libs, externs, test_args, maybe_sysroot, render_type,
display_warnings)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name, maybe_sysroot,
render_type)
render_type, display_warnings)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
@ -389,13 +392,15 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let display_warnings = matches.opt_present("display-warnings");
let (tx, rx) = channel();
rustc_driver::monitor(move || {
use rustc::session::config::Input;
let (mut krate, renderinfo) =
core::run_core(paths, cfgs, externs, Input::File(cr), triple, maybe_sysroot);
core::run_core(paths, cfgs, externs, Input::File(cr), triple, maybe_sysroot,
display_warnings);
info!("finished with rustc");

View file

@ -150,7 +150,7 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>,
render_type: RenderType) -> isize {
render_type: RenderType, display_warnings: bool) -> isize {
let input_str = match load_string(input) {
Ok(s) => s,
Err(LoadStringError::ReadFail) => return 1,
@ -166,6 +166,7 @@ pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
old_find_testable_code(&input_str, &mut collector, DUMMY_SP);
find_testable_code(&input_str, &mut collector, DUMMY_SP);
test_args.insert(0, "rustdoctest".to_string());
testing::test_main(&test_args, collector.tests);
testing::test_main(&test_args, collector.tests,
testing::Options::new().display_output(display_warnings));
0
}

View file

@ -58,7 +58,8 @@ pub fn run(input: &str,
mut test_args: Vec<String>,
crate_name: Option<String>,
maybe_sysroot: Option<PathBuf>,
render_type: RenderType)
render_type: RenderType,
display_warnings: bool)
-> isize {
let input_path = PathBuf::from(input);
let input = config::Input::File(input_path.clone());
@ -127,7 +128,8 @@ pub fn run(input: &str,
test_args.insert(0, "rustdoctest".to_string());
testing::test_main(&test_args,
collector.tests.into_iter().collect());
collector.tests.into_iter().collect(),
testing::Options::new().display_output(display_warnings));
0
}

View file

@ -43,11 +43,16 @@ fn main() {
println!("cargo:rustc-link-lib=pthread");
} else if target.contains("apple-darwin") {
println!("cargo:rustc-link-lib=System");
// res_init and friends require -lresolv on macOS/iOS.
// See #41582 and http://blog.achernya.com/2013/03/os-x-has-silly-libsystem.html
println!("cargo:rustc-link-lib=resolv");
} else if target.contains("apple-ios") {
println!("cargo:rustc-link-lib=System");
println!("cargo:rustc-link-lib=objc");
println!("cargo:rustc-link-lib=framework=Security");
println!("cargo:rustc-link-lib=framework=Foundation");
println!("cargo:rustc-link-lib=resolv");
} else if target.contains("windows") {
println!("cargo:rustc-link-lib=advapi32");
println!("cargo:rustc-link-lib=ws2_32");

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use alloc::heap::{EMPTY, allocate, deallocate};
use alloc::heap::{allocate, deallocate};
use cmp;
use hash::{BuildHasher, Hash, Hasher};
@ -33,6 +33,7 @@ use self::BucketState::*;
type HashUint = usize;
const EMPTY_BUCKET: HashUint = 0;
const EMPTY: usize = 1;
/// Special `Unique<HashUint>` that uses the lower bit of the pointer
/// to expose a boolean tag.
@ -49,24 +50,25 @@ impl TaggedHashUintPtr {
#[inline]
fn set_tag(&mut self, value: bool) {
let usize_ptr = &*self.0 as *const *mut HashUint as *mut usize;
let mut usize_ptr = self.0.as_ptr() as usize;
unsafe {
if value {
*usize_ptr |= 1;
usize_ptr |= 1;
} else {
*usize_ptr &= !1;
usize_ptr &= !1;
}
self.0 = Unique::new(usize_ptr as *mut HashUint)
}
}
#[inline]
fn tag(&self) -> bool {
(*self.0 as usize) & 1 == 1
(self.0.as_ptr() as usize) & 1 == 1
}
#[inline]
fn ptr(&self) -> *mut HashUint {
(*self.0 as usize & !1) as *mut HashUint
(self.0.as_ptr() as usize & !1) as *mut HashUint
}
}
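The `TaggedHashUintPtr` change above works because `HashUint` is a `usize`, whose alignment guarantees the pointer's low bit is always zero and therefore free to carry a boolean flag. A minimal self-contained sketch of the same low-bit tagging on a raw pointer (the `TaggedPtr` name and its methods are illustrative, not part of this diff):

struct TaggedPtr(*mut usize);

impl TaggedPtr {
    fn new(ptr: *mut usize) -> TaggedPtr {
        // usize alignment (>= 2 bytes) keeps the low bit clear initially.
        debug_assert!(ptr as usize & 1 == 0);
        TaggedPtr(ptr)
    }

    fn set_tag(&mut self, value: bool) {
        let bits = self.0 as usize;
        self.0 = (if value { bits | 1 } else { bits & !1 }) as *mut usize;
    }

    fn tag(&self) -> bool {
        self.0 as usize & 1 == 1
    }

    fn ptr(&self) -> *mut usize {
        // Strip the tag before exposing the real pointer.
        (self.0 as usize & !1) as *mut usize
    }
}

The diff's version stores a `Unique<HashUint>` rather than a raw pointer, so its `set_tag` has to rebuild the `Unique` via `Unique::new` after toggling the bit, which is exactly what the replaced lines above do.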
@ -1112,10 +1114,12 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
#[inline]
fn next(&mut self) -> Option<(SafeHash, K, V)> {
self.iter.next().map(|raw| unsafe {
(*self.table.as_mut_ptr()).size -= 1;
let (k, v) = ptr::read(raw.pair());
(SafeHash { hash: ptr::replace(&mut *raw.hash(), EMPTY_BUCKET) }, k, v)
self.iter.next().map(|raw| {
unsafe {
self.table.as_mut().size -= 1;
let (k, v) = ptr::read(raw.pair());
(SafeHash { hash: ptr::replace(&mut *raw.hash(), EMPTY_BUCKET) }, k, v)
}
})
}

View file

@ -38,3 +38,8 @@ pub unsafe fn destroy(key: Key) {
let r = libc::pthread_key_delete(key);
debug_assert_eq!(r, 0);
}
#[inline]
pub fn requires_synchronized_create() -> bool {
false
}

View file

@ -935,7 +935,6 @@ extern "system" {
args: *const c_void)
-> DWORD;
pub fn TlsAlloc() -> DWORD;
pub fn TlsFree(dwTlsIndex: DWORD) -> BOOL;
pub fn TlsGetValue(dwTlsIndex: DWORD) -> LPVOID;
pub fn TlsSetValue(dwTlsIndex: DWORD, lpTlsvalue: LPVOID) -> BOOL;
pub fn GetLastError() -> DWORD;

View file

@ -8,10 +8,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use mem;
use ptr;
use sync::atomic::AtomicPtr;
use sync::atomic::Ordering::SeqCst;
use sys::c;
use sys_common::mutex::Mutex;
use sys_common;
pub type Key = c::DWORD;
pub type Dtor = unsafe extern fn(*mut u8);
@ -34,8 +35,6 @@ pub type Dtor = unsafe extern fn(*mut u8);
// * All TLS destructors are tracked by *us*, not the windows runtime. This
// means that we have a global list of destructors for each TLS key that
// we know about.
// * When a TLS key is destroyed, we're sure to remove it from the dtor list
// if it's in there.
// * When a thread exits, we run over the entire list and run dtors for all
// non-null keys. This attempts to match Unix semantics in this regard.
//
@ -50,13 +49,6 @@ pub type Dtor = unsafe extern fn(*mut u8);
// [2]: https://github.com/ChromiumWebApps/chromium/blob/master/base
// /threading/thread_local_storage_win.cc#L42
// NB these are specifically not types from `std::sync` as they currently rely
// on poisoning and this module needs to operate at a lower level than requiring
// the thread infrastructure to be in place (useful on the borders of
// initialization/destruction).
static DTOR_LOCK: Mutex = Mutex::new();
static mut DTORS: *mut Vec<(Key, Dtor)> = ptr::null_mut();
// -------------------------------------------------------------------------
// Native bindings
//
@ -85,81 +77,64 @@ pub unsafe fn get(key: Key) -> *mut u8 {
}
#[inline]
pub unsafe fn destroy(key: Key) {
if unregister_dtor(key) {
// FIXME: Currently if a key has a destructor associated with it we
// can't actually ever unregister it. If we were to
// unregister it, then any key destruction would have to be
// serialized with respect to actually running destructors.
//
// We want to avoid a race where right before run_dtors runs
// some destructors TlsFree is called. Allowing the call to
// TlsFree would imply that the caller understands that *all
// known threads* are not exiting, which is quite a difficult
// thing to know!
//
// For now we just leak all keys with dtors to "fix" this.
// Note that source [2] above shows precedent for this sort
// of strategy.
} else {
let r = c::TlsFree(key);
debug_assert!(r != 0);
}
pub unsafe fn destroy(_key: Key) {
rtabort!("can't destroy tls keys on windows")
}
#[inline]
pub fn requires_synchronized_create() -> bool {
true
}
// -------------------------------------------------------------------------
// Dtor registration
//
// These functions are associated with registering and unregistering
// destructors. They're pretty simple, they just push onto a vector and scan
// a vector currently.
// Windows has no native support for running destructors, so we manage our own
// list of destructors to keep track of how to destroy keys. We then install a
// callback later to get invoked whenever a thread exits, running all
// appropriate destructors.
//
// FIXME: This could probably be at least a little faster with a BTree.
// Currently unregistration from this list is not supported. A destructor can be
// registered but cannot be unregistered. There are various simplifying reasons
// for doing this, the big ones being:
//
// 1. Currently we don't even support deallocating TLS keys, so normal operation
// doesn't need to deallocate a destructor.
// 2. There is no point in time where we know we can unregister a destructor
// because it could always be getting run by some remote thread.
//
// Typically processes have a statically known set of TLS keys, which is pretty
// small, and we'd want to keep this memory alive for the whole process anyway.
//
// Perhaps one day we can fold the `Box` here into a static allocation,
// expanding the `StaticKey` structure to contain not only a slot for the TLS
// key but also a slot for the destructor queue on windows. An optimization for
// another day!
unsafe fn init_dtors() {
if !DTORS.is_null() { return }
static DTORS: AtomicPtr<Node> = AtomicPtr::new(ptr::null_mut());
let dtors = box Vec::<(Key, Dtor)>::new();
let res = sys_common::at_exit(move|| {
DTOR_LOCK.lock();
let dtors = DTORS;
DTORS = 1 as *mut _;
Box::from_raw(dtors);
assert!(DTORS as usize == 1); // can't re-init after destructing
DTOR_LOCK.unlock();
});
if res.is_ok() {
DTORS = Box::into_raw(dtors);
} else {
DTORS = 1 as *mut _;
}
struct Node {
dtor: Dtor,
key: Key,
next: *mut Node,
}
unsafe fn register_dtor(key: Key, dtor: Dtor) {
DTOR_LOCK.lock();
init_dtors();
assert!(DTORS as usize != 0);
assert!(DTORS as usize != 1,
"cannot create new TLS keys after the main thread has exited");
(*DTORS).push((key, dtor));
DTOR_LOCK.unlock();
}
let mut node = Box::new(Node {
key: key,
dtor: dtor,
next: ptr::null_mut(),
});
unsafe fn unregister_dtor(key: Key) -> bool {
DTOR_LOCK.lock();
init_dtors();
assert!(DTORS as usize != 0);
assert!(DTORS as usize != 1,
"cannot unregister destructors after the main thread has exited");
let ret = {
let dtors = &mut *DTORS;
let before = dtors.len();
dtors.retain(|&(k, _)| k != key);
dtors.len() != before
};
DTOR_LOCK.unlock();
ret
let mut head = DTORS.load(SeqCst);
loop {
node.next = head;
match DTORS.compare_exchange(head, &mut *node, SeqCst, SeqCst) {
Ok(_) => return mem::forget(node),
Err(cur) => head = cur,
}
}
}
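The `compare_exchange` loop above is the classic lock-free list push: read the head, point the new node at it, and retry if another thread moved the head in the meantime. A hedged, self-contained sketch of the same pattern outside of std (`PushOnlyList`, `Node`, and `for_each` are illustrative names; like the dtor list in this diff, nodes are deliberately leaked so traversal never races with a free):

use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering::SeqCst};

struct Node<T> {
    value: T,
    next: *mut Node<T>,
}

struct PushOnlyList<T> {
    head: AtomicPtr<Node<T>>,
}

impl<T> PushOnlyList<T> {
    fn new() -> PushOnlyList<T> {
        PushOnlyList { head: AtomicPtr::new(ptr::null_mut()) }
    }

    fn push(&self, value: T) {
        // Leak the node on purpose: it must outlive every reader.
        let node = Box::into_raw(Box::new(Node { value: value, next: ptr::null_mut() }));
        let mut head = self.head.load(SeqCst);
        loop {
            unsafe { (*node).next = head; }
            match self.head.compare_exchange(head, node, SeqCst, SeqCst) {
                Ok(_) => return,
                // Lost the race: retry against the new head.
                Err(cur) => head = cur,
            }
        }
    }

    fn for_each<F: FnMut(&T)>(&self, mut f: F) {
        // Safe to walk without further synchronization: nodes are never
        // removed or freed once published.
        let mut cur = self.head.load(SeqCst);
        while !cur.is_null() {
            unsafe {
                f(&(*cur).value);
                cur = (*cur).next;
            }
        }
    }
}

`run_dtors` later in this file walks `DTORS` exactly like `for_each` here, invoking each registered destructor whose TLS slot is still non-null.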
// -------------------------------------------------------------------------
@ -196,16 +171,12 @@ unsafe fn unregister_dtor(key: Key) -> bool {
// # Ok, what's up with running all these destructors?
//
// This will likely need to be improved over time, but this function
// attempts a "poor man's" destructor callback system. To do this we clone a
// local copy of the dtor list to start out with. This is our fudgy attempt
// to not hold the lock while destructors run and not worry about the list
// changing while we're looking at it.
//
// Once we've got a list of what to run, we iterate over all keys, check
// their values, and then run destructors if the values turn out to be non
// null (setting them to null just beforehand). We do this a few times in a
// loop to basically match Unix semantics. If we don't reach a fixed point
// after a short while then we just inevitably leak something most likely.
// attempts a "poor man's" destructor callback system. Once we've got a list
// of what to run, we iterate over all keys, check their values, and then run
// destructors if the values turn out to be non null (setting them to null just
// beforehand). We do this a few times in a loop to basically match Unix
// semantics. If we don't reach a fixed point after a short while then we just
// inevitably leak something most likely.
//
// # The article mentions weird stuff about "/INCLUDE"?
//
@ -259,25 +230,21 @@ unsafe extern "system" fn on_tls_callback(h: c::LPVOID,
unsafe fn run_dtors() {
let mut any_run = true;
for _ in 0..5 {
if !any_run { break }
if !any_run {
break
}
any_run = false;
let dtors = {
DTOR_LOCK.lock();
let ret = if DTORS as usize <= 1 {
Vec::new()
} else {
(*DTORS).iter().map(|s| *s).collect()
};
DTOR_LOCK.unlock();
ret
};
for &(key, dtor) in &dtors {
let ptr = c::TlsGetValue(key);
let mut cur = DTORS.load(SeqCst);
while !cur.is_null() {
let ptr = c::TlsGetValue((*cur).key);
if !ptr.is_null() {
c::TlsSetValue(key, ptr::null_mut());
dtor(ptr as *mut _);
c::TlsSetValue((*cur).key, ptr::null_mut());
((*cur).dtor)(ptr as *mut _);
any_run = true;
}
cur = (*cur).next;
}
}
}

View file

@ -177,9 +177,22 @@ pub fn lookup_host(host: &str) -> io::Result<LookupHost> {
};
let mut res = ptr::null_mut();
unsafe {
cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), &hints,
&mut res))?;
Ok(LookupHost { original: res, cur: res })
match cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), &hints, &mut res)) {
Ok(_) => {
Ok(LookupHost { original: res, cur: res })
},
#[cfg(unix)]
Err(e) => {
// The lookup failure could be caused by using a stale /etc/resolv.conf.
// See https://github.com/rust-lang/rust/issues/41570.
// We therefore force a reload of the nameserver information.
c::res_init();
Err(e)
},
// The cfg is needed here to avoid an "unreachable pattern" warning.
#[cfg(not(unix))]
Err(e) => Err(e),
}
}
}
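Note the new `Err` arm still returns the error after calling `res_init()`; reloading the resolver state only helps the next lookup. A hedged sketch of a caller that takes advantage of that by retrying once (the `lookup_with_retry` helper is hypothetical, written inside the same module against the `lookup_host` signature above):

// Hypothetical caller-side helper; not part of this diff.
fn lookup_with_retry(host: &str) -> io::Result<LookupHost> {
    lookup_host(host).or_else(|_| {
        // The failed call already ran res_init(), so the resolver
        // state is fresh for this second attempt.
        lookup_host(host)
    })
}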

View file

@ -61,6 +61,7 @@
use sync::atomic::{self, AtomicUsize, Ordering};
use sys::thread_local as imp;
use sys_common::mutex::Mutex;
/// A type for TLS keys that are statically allocated.
///
@ -145,20 +146,6 @@ impl StaticKey {
#[inline]
pub unsafe fn set(&self, val: *mut u8) { imp::set(self.key(), val) }
/// Deallocates this OS TLS key.
///
/// This function is unsafe as there is no guarantee that the key is not
/// currently in use by other threads or will not ever be used again.
///
/// Note that this does *not* run the user-provided destructor if one was
/// specified at definition time. Doing so must be done manually.
pub unsafe fn destroy(&self) {
match self.key.swap(0, Ordering::SeqCst) {
0 => {}
n => { imp::destroy(n as imp::Key) }
}
}
#[inline]
unsafe fn key(&self) -> imp::Key {
match self.key.load(Ordering::Relaxed) {
@ -168,6 +155,24 @@ impl StaticKey {
}
unsafe fn lazy_init(&self) -> usize {
// Currently the Windows implementation of TLS is pretty hairy, and
// it greatly simplifies creation if we just synchronize everything.
//
// Additionally, a TLS key of 0 hasn't been observed on Windows, so we
// can treat 0 as the "uninitialized" sentinel and keep this branch simple.
if imp::requires_synchronized_create() {
static INIT_LOCK: Mutex = Mutex::new();
INIT_LOCK.lock();
let mut key = self.key.load(Ordering::SeqCst);
if key == 0 {
key = imp::create(self.dtor) as usize;
self.key.store(key, Ordering::SeqCst);
}
INIT_LOCK.unlock();
assert!(key != 0);
return key
}
// POSIX allows the key created here to be 0, but the compare_and_swap
// below relies on using 0 as a sentinel value to check who won the
// race to set the shared TLS key. As far as I know, there is no
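The hunk cuts off mid-comment, but the POSIX branch it introduces is the usual 0-sentinel race. A hedged sketch of how that race resolves, with hypothetical `create_key`/`destroy_key` stand-ins for `imp::create`/`imp::destroy` (the real code must also handle the rare case where the OS legitimately hands back a key of 0; this sketch just asserts):

use std::sync::atomic::{AtomicUsize, Ordering};

// Illustrative stand-ins for imp::create / imp::destroy.
unsafe fn create_key() -> usize { 1 /* e.g. pthread_key_create */ }
unsafe fn destroy_key(_key: usize) { /* e.g. pthread_key_delete */ }

unsafe fn lazy_init(slot: &AtomicUsize) -> usize {
    let key = create_key();
    assert!(key != 0, "0 is reserved as the uninitialized sentinel");
    match slot.compare_exchange(0, key, Ordering::SeqCst, Ordering::SeqCst) {
        // We won the race: our freshly created key becomes the shared one.
        Ok(_) => key,
        // We lost: free our key and adopt the winner's.
        Err(winner) => {
            destroy_key(key);
            winner
        }
    }
}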
@ -227,7 +232,9 @@ impl Key {
impl Drop for Key {
fn drop(&mut self) {
unsafe { imp::destroy(self.key) }
// Right now Windows doesn't support TLS key destruction, but this also
// isn't used anywhere other than tests, so just leak the TLS key.
// unsafe { imp::destroy(self.key) }
}
}

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE: The following code was generated by "src/etc/unicode.py", do not edit directly
// NOTE: The following code was generated by "./unicode.py", do not edit directly
#![allow(missing_docs, non_upper_case_globals, non_snake_case)]

View file

@ -35,7 +35,7 @@ preamble = '''// Copyright 2012-2016 The Rust Project Developers. See the COPYRI
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE: The following code was generated by "src/etc/unicode.py", do not edit directly
// NOTE: The following code was generated by "./unicode.py", do not edit directly
#![allow(missing_docs, non_upper_case_globals, non_snake_case)]
'''

View file

@ -2700,6 +2700,19 @@ impl<'a> Parser<'a> {
let (span, e) = self.interpolated_or_expr_span(e)?;
(span, self.mk_unary(UnOp::Not, e))
}
// Suggest `!` for bitwise negation when encountering a `~`
token::Tilde => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
let span_of_tilde = lo;
let mut err = self.diagnostic().struct_span_err(span_of_tilde,
"`~` can not be used as an unary operator");
err.span_label(span_of_tilde, &"did you mean `!`?");
err.help("use `!` instead of `~` if you meant to perform bitwise negation");
err.emit();
(span, self.mk_unary(UnOp::Not, e))
}
token::BinOp(token::Minus) => {
self.bump();
let e = self.parse_prefix_expr(None);

View file

@ -442,7 +442,7 @@ We're going to be building a module that looks more or less like:
mod __test {
extern crate test (name = "test", vers = "...");
fn main() {
test::test_main_static(&::os::args()[], tests)
test::test_main_static(&::os::args()[], tests, test::Options::new())
}
static tests : &'static [test::TestDescAndFn] = &[
@ -478,7 +478,7 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
// pub fn main() {
// #![main]
// use std::slice::AsSlice;
// test::test_main_static(::std::os::args().as_slice(), TESTS);
// test::test_main_static(::std::os::args().as_slice(), TESTS, test::Options::new());
// }
let sp = ignored_span(cx, DUMMY_SP);

View file

@ -76,7 +76,7 @@ pub mod test {
pub use {Bencher, TestName, TestResult, TestDesc, TestDescAndFn, TestOpts, TrFailed,
TrFailedMsg, TrIgnored, TrOk, Metric, MetricMap, StaticTestFn, StaticTestName,
DynTestName, DynTestFn, run_test, test_main, test_main_static, filter_tests,
parse_opts, StaticBenchFn, ShouldPanic};
parse_opts, StaticBenchFn, ShouldPanic, Options};
}
pub mod stats;
@ -252,14 +252,34 @@ impl Clone for MetricMap {
}
}
/// Runner options. If we want to add other options later, just add them to this struct.
#[derive(Copy, Clone, Debug)]
pub struct Options {
display_output: bool,
}
impl Options {
pub fn new() -> Options {
Options {
display_output: false,
}
}
pub fn display_output(mut self, display_output: bool) -> Options {
self.display_output = display_output;
self
}
}
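Because `display_output` consumes and returns the `Options` value, callers can build the options inline at the `test_main` call site. A small usage sketch (assuming `args`, `tests`, and a `display_warnings` flag are in scope, as in the rustdoc changes above):

// Forward e.g. rustdoc's --display-warnings flag into libtest.
test_main(&args, tests, Options::new().display_output(display_warnings));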
// The default console test runner. It accepts the command line
// arguments and a vector of test_descs.
pub fn test_main(args: &[String], tests: Vec<TestDescAndFn>) {
let opts = match parse_opts(args) {
pub fn test_main(args: &[String], tests: Vec<TestDescAndFn>, options: Options) {
let mut opts = match parse_opts(args) {
Some(Ok(o)) => o,
Some(Err(msg)) => panic!("{:?}", msg),
None => return,
};
opts.options = options;
if opts.list {
if let Err(e) = list_tests_console(&opts, tests) {
panic!("io error when listing tests: {:?}", e);
@ -301,16 +321,17 @@ pub fn test_main_static(tests: &[TestDescAndFn]) {
}
})
.collect();
test_main(&args, owned_tests)
test_main(&args, owned_tests, Options::new())
}
#[derive(Copy, Clone)]
#[derive(Copy, Clone, Debug)]
pub enum ColorConfig {
AutoColor,
AlwaysColor,
NeverColor,
}
#[derive(Debug)]
pub struct TestOpts {
pub list: bool,
pub filter: Option<String>,
@ -324,6 +345,7 @@ pub struct TestOpts {
pub quiet: bool,
pub test_threads: Option<usize>,
pub skip: Vec<String>,
pub options: Options,
}
impl TestOpts {
@ -342,6 +364,7 @@ impl TestOpts {
quiet: false,
test_threads: None,
skip: vec![],
options: Options::new(),
}
}
}
@ -481,6 +504,7 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
quiet: quiet,
test_threads: test_threads,
skip: matches.opt_strs("skip"),
options: Options::new(),
};
Some(Ok(test_opts))
@ -521,7 +545,9 @@ struct ConsoleTestState<T> {
measured: usize,
metrics: MetricMap,
failures: Vec<(TestDesc, Vec<u8>)>,
not_failures: Vec<(TestDesc, Vec<u8>)>,
max_name_len: usize, // number of columns to fill when aligning names
options: Options,
}
impl<T: Write> ConsoleTestState<T> {
@ -547,7 +573,9 @@ impl<T: Write> ConsoleTestState<T> {
measured: 0,
metrics: MetricMap::new(),
failures: Vec::new(),
not_failures: Vec::new(),
max_name_len: 0,
options: opts.options,
})
}
@ -703,9 +731,38 @@ impl<T: Write> ConsoleTestState<T> {
Ok(())
}
pub fn write_outputs(&mut self) -> io::Result<()> {
self.write_plain("\nsuccesses:\n")?;
let mut successes = Vec::new();
let mut stdouts = String::new();
for &(ref f, ref stdout) in &self.not_failures {
successes.push(f.name.to_string());
if !stdout.is_empty() {
stdouts.push_str(&format!("---- {} stdout ----\n\t", f.name));
let output = String::from_utf8_lossy(stdout);
stdouts.push_str(&output);
stdouts.push_str("\n");
}
}
if !stdouts.is_empty() {
self.write_plain("\n")?;
self.write_plain(&stdouts)?;
}
self.write_plain("\nsuccesses:\n")?;
successes.sort();
for name in &successes {
self.write_plain(&format!(" {}\n", name))?;
}
Ok(())
}
pub fn write_run_finish(&mut self) -> io::Result<bool> {
assert!(self.passed + self.failed + self.ignored + self.measured == self.total);
if self.options.display_output {
self.write_outputs()?;
}
let success = self.failed == 0;
if !success {
self.write_failures()?;
@ -824,7 +881,10 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Resu
st.write_log_result(&test, &result)?;
st.write_result(&result)?;
match result {
TrOk => st.passed += 1,
TrOk => {
st.passed += 1;
st.not_failures.push((test, stdout));
}
TrIgnored => st.ignored += 1,
TrMetrics(mm) => {
let tname = test.name;
@ -901,6 +961,8 @@ fn should_sort_failures_before_printing_them() {
max_name_len: 10,
metrics: MetricMap::new(),
failures: vec![(test_b, Vec::new()), (test_a, Vec::new())],
options: Options::new(),
not_failures: Vec::new(),
};
st.write_failures().unwrap();

View file

@ -58,13 +58,15 @@ mod signatures {
fn method(&self, x: u32) { }
}
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
struct WillChanges {
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
x: WillChange,
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
y: WillChange
}
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
// The fields change, not the type itself.
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR no path
fn indirect(x: WillChanges) { }
}

View file

@ -23,15 +23,21 @@ fn main() { }
#[rustc_if_this_changed]
type TypeAlias = u32;
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
// The type alias directly affects the type of the field,
// not the enclosing struct:
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR no path
struct Struct {
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
x: TypeAlias,
y: u32
}
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR no path
enum Enum {
Variant1(TypeAlias),
Variant1 {
#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK
t: TypeAlias
},
Variant2(i32)
}

View file

@ -0,0 +1,32 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that changing what a `type` points to does not go unnoticed
// by the variance analysis.
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]
#![allow(unused_variables)]
fn main() { }
struct Foo<T> {
f: T
}
#[rustc_if_this_changed]
type TypeAlias<T> = Foo<T>;
#[rustc_then_this_would_need(ItemVariances)] //~ ERROR OK
struct Use<T> {
x: TypeAlias<T>
}

View file

@ -60,7 +60,6 @@ struct Test6<'a, 'b:'a> { //~ ERROR [-, o]
#[rustc_variance]
struct Test7<'a> { //~ ERROR [*]
//~^ ERROR parameter `'a` is never used
x: isize
}

View file

@ -16,7 +16,6 @@
#[rustc_variance]
enum Base<'a, 'b, 'c:'b, 'd> { //~ ERROR [+, -, o, *]
//~^ ERROR parameter `'d` is never used
Test8A(extern "Rust" fn(&'a isize)),
Test8B(&'b [isize]),
Test8C(&'b mut &'c str),
@ -24,19 +23,16 @@ enum Base<'a, 'b, 'c:'b, 'd> { //~ ERROR [+, -, o, *]
#[rustc_variance]
struct Derived1<'w, 'x:'y, 'y, 'z> { //~ ERROR [*, o, -, +]
//~^ ERROR parameter `'w` is never used
f: Base<'z, 'y, 'x, 'w>
}
#[rustc_variance] // Combine - and + to yield o
struct Derived2<'a, 'b:'a, 'c> { //~ ERROR [o, o, *]
//~^ ERROR parameter `'c` is never used
f: Base<'a, 'a, 'b, 'c>
}
#[rustc_variance] // Combine + and o to yield o (just pay attention to 'a here)
struct Derived3<'a:'b, 'b, 'c> { //~ ERROR [o, -, *]
//~^ ERROR parameter `'c` is never used
f: Base<'a, 'b, 'a, 'c>
}

View file

@ -30,8 +30,7 @@ struct TestStruct<U,T:Setter<U>> { //~ ERROR [+, +]
}
#[rustc_variance]
enum TestEnum<U,T:Setter<U>> {//~ ERROR [*, +]
//~^ ERROR parameter `U` is never used
enum TestEnum<U,T:Setter<U>> { //~ ERROR [*, +]
Foo(T)
}
@ -51,13 +50,11 @@ trait TestTrait3<U> { //~ ERROR [o, o]
#[rustc_variance]
struct TestContraStruct<U,T:Setter<U>> { //~ ERROR [*, +]
//~^ ERROR parameter `U` is never used
t: T
}
#[rustc_variance]
struct TestBox<U,T:Getter<U>+Setter<U>> { //~ ERROR [*, +]
//~^ ERROR parameter `U` is never used
t: T
}

View file

@ -72,6 +72,7 @@ else
endif
else
ifeq ($(UNAME),Darwin)
EXTRACFLAGS := -lresolv
else
ifeq ($(UNAME),FreeBSD)
EXTRACFLAGS := -lm -lpthread -lgcc_s

View file

@ -0,0 +1,23 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_type="lib"]
enum E { E0 = 0, E1 = 1 }
const E0_U8: u8 = E::E0 as u8;
const E1_U8: u8 = E::E1 as u8;
pub fn go<T>() {
match 0 {
E0_U8 => (),
E1_U8 => (),
_ => (),
}
}

View file

@ -0,0 +1,17 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:issue_24106.rs
extern crate issue_24106;
fn main() {
issue_24106::go::<()>();
}

View file

@ -0,0 +1,13 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let x = ~1;
}

View file

@ -0,0 +1,10 @@
error: `~` cannot be used as a unary operator
--> $DIR/issue-41679.rs:12:13
|
12 | let x = ~1;
| ^ did you mean `!`?
|
= help: use `!` instead of `~` if you meant to perform bitwise negation
error: aborting due to previous error

View file

@ -336,6 +336,7 @@ pub fn test_opts(config: &Config) -> test::TestOpts {
test_threads: None,
skip: vec![],
list: false,
options: test::Options::new(),
}
}

View file

@ -101,7 +101,7 @@ pub fn check(path: &Path, bad: &mut bool) {
filename.starts_with(".#") {
return
}
if filename == "miniz.c" || filename.contains("jquery") {
if filename == "miniz.c" {
return
}