Merge remote-tracking branch 'origin/master' into gen
This commit is contained in:
commit
b31998ec93
103 changed files with 719 additions and 1563 deletions
|
@ -415,4 +415,4 @@ are:
|
|||
[tlgba]: http://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/
|
||||
[ro]: http://www.rustaceans.org/
|
||||
[rctd]: ./src/test/COMPILER_TESTS.md
|
||||
[cheatsheet]: https://buildbot.rust-lang.org/homu/
|
||||
[cheatsheet]: https://buildbot2.rust-lang.org/homu/
|
||||
|
|
|
@ -121,6 +121,41 @@
|
|||
"llvm": "vavg{0.kind}{0.data_type_short}",
|
||||
"ret": "i(8-32)",
|
||||
"args": ["0", "0"]
|
||||
},
|
||||
{
|
||||
"intrinsic": "packs{0.kind}{1.data_type_short}",
|
||||
"width": [128],
|
||||
"llvm": "vpk{0.kind}{1.data_type_short}{0.kind}s",
|
||||
"ret": "i(8-16)",
|
||||
"args": ["0W", "1"]
|
||||
},
|
||||
{
|
||||
"intrinsic": "packsu{1.kind}{1.data_type_short}",
|
||||
"width": [128],
|
||||
"llvm": "vpk{1.kind}{1.data_type_short}{0.kind}s",
|
||||
"ret": "u(8-16)",
|
||||
"args": ["0Ws", "1"]
|
||||
},
|
||||
{
|
||||
"intrinsic": "packpx",
|
||||
"width": [128],
|
||||
"llvm": "vpkpx",
|
||||
"ret": "s16",
|
||||
"args": ["s32", "s32"]
|
||||
},
|
||||
{
|
||||
"intrinsic": "unpackl{1.kind}{1.data_type_short}",
|
||||
"width": [128],
|
||||
"llvm": "vupkl{1.kind}{1.data_type_short}",
|
||||
"ret": "s(16-32)",
|
||||
"args": ["0N"]
|
||||
},
|
||||
{
|
||||
"intrinsic": "unpackh{1.kind}{1.data_type_short}",
|
||||
"width": [128],
|
||||
"llvm": "vupkh{1.kind}{1.data_type_short}",
|
||||
"ret": "s(16-32)",
|
||||
"args": ["0N"]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit 11bfb0dcf85f7aa92abd30524bb1e42e18d108c6
|
||||
Subproject commit 1f5a28755e301ac581e2048011e4e0ff3da482ef
|
|
@ -211,7 +211,7 @@
|
|||
//!
|
||||
//! - [`fmt::Display`][`Display`] implementations assert that the type can be faithfully
|
||||
//! represented as a UTF-8 string at all times. It is **not** expected that
|
||||
//! all types implement the `Display` trait.
|
||||
//! all types implement the [`Display`] trait.
|
||||
//! - [`fmt::Debug`][`Debug`] implementations should be implemented for **all** public types.
|
||||
//! Output will typically represent the internal state as faithfully as possible.
|
||||
//! The purpose of the [`Debug`] trait is to facilitate debugging Rust code. In
|
||||
|
|
|
@ -38,7 +38,7 @@
|
|||
//! let message = s + " world!";
|
||||
//! ```
|
||||
//!
|
||||
//! If you have a vector of valid UTF-8 bytes, you can make a `String` out of
|
||||
//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
|
||||
//! it. You can do the reverse too.
|
||||
//!
|
||||
//! ```
|
||||
|
@ -155,17 +155,14 @@ use boxed::Box;
|
|||
/// takes_str(&s);
|
||||
/// ```
|
||||
///
|
||||
/// [`&str`]: ../../std/primitive.str.html
|
||||
/// [`Deref`]: ../../std/ops/trait.Deref.html
|
||||
///
|
||||
/// This will create a [`&str`] from the `String` and pass it in. This
|
||||
/// conversion is very inexpensive, and so generally, functions will accept
|
||||
/// [`&str`]s as arguments unless they need a `String` for some specific
|
||||
/// reason.
|
||||
///
|
||||
/// In certain cases Rust doesn't have enough information to make this
|
||||
/// conversion, known as `Deref` coercion. In the following example a string
|
||||
/// slice `&'a str` implements the trait `TraitExample`, and the function
|
||||
/// conversion, known as [`Deref`] coercion. In the following example a string
|
||||
/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function
|
||||
/// `example_func` takes anything that implements the trait. In this case Rust
|
||||
/// would need to make two implicit conversions, which Rust doesn't have the
|
||||
/// means to do. For that reason, the following example will not compile.
|
||||
|
@ -185,13 +182,13 @@ use boxed::Box;
|
|||
///
|
||||
/// There are two options that would work instead. The first would be to
|
||||
/// change the line `example_func(&example_string);` to
|
||||
/// `example_func(example_string.as_str());`, using the method `as_str()`
|
||||
/// `example_func(example_string.as_str());`, using the method [`as_str()`]
|
||||
/// to explicitly extract the string slice containing the string. The second
|
||||
/// way changes `example_func(&example_string);` to
|
||||
/// `example_func(&*example_string);`. In this case we are dereferencing a
|
||||
/// `String` to a `str`, then referencing the `str` back to `&str`. The
|
||||
/// second way is more idiomatic, however both work to do the conversion
|
||||
/// explicitly rather than relying on the implicit conversion.
|
||||
/// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to
|
||||
/// [`&str`]. The second way is more idiomatic, however both work to do the
|
||||
/// conversion explicitly rather than relying on the implicit conversion.
|
||||
///
|
||||
/// # Representation
|
||||
///
|
||||
|
@ -287,6 +284,10 @@ use boxed::Box;
|
|||
/// ```
|
||||
///
|
||||
/// Here, there's no need to allocate more memory inside the loop.
|
||||
///
|
||||
/// [`&str`]: ../../std/primitive.str.html
|
||||
/// [`Deref`]: ../../std/ops/trait.Deref.html
|
||||
/// [`as_str()`]: struct.String.html#method.as_str
|
||||
#[derive(PartialOrd, Eq, Ord)]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub struct String {
|
||||
|
@ -443,32 +444,22 @@ impl String {
|
|||
/// requires that it is valid UTF-8. `from_utf8()` checks to ensure that
|
||||
/// the bytes are valid UTF-8, and then does the conversion.
|
||||
///
|
||||
/// [`&str`]: ../../std/primitive.str.html
|
||||
/// [`u8`]: ../../std/primitive.u8.html
|
||||
/// [`Vec<u8>`]: ../../std/vec/struct.Vec.html
|
||||
///
|
||||
/// If you are sure that the byte slice is valid UTF-8, and you don't want
|
||||
/// to incur the overhead of the validity check, there is an unsafe version
|
||||
/// of this function, [`from_utf8_unchecked`], which has the same behavior
|
||||
/// but skips the check.
|
||||
///
|
||||
/// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
|
||||
///
|
||||
/// This method will take care to not copy the vector, for efficiency's
|
||||
/// sake.
|
||||
///
|
||||
/// If you need a `&str` instead of a `String`, consider
|
||||
/// If you need a [`&str`] instead of a `String`, consider
|
||||
/// [`str::from_utf8`].
|
||||
///
|
||||
/// [`str::from_utf8`]: ../../std/str/fn.from_utf8.html
|
||||
///
|
||||
/// The inverse of this method is [`as_bytes`].
|
||||
///
|
||||
/// [`as_bytes`]: #method.as_bytes
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns `Err` if the slice is not UTF-8 with a description as to why the
|
||||
/// Returns [`Err`] if the slice is not UTF-8 with a description as to why the
|
||||
/// provided bytes are not UTF-8. The vector you moved in is also included.
|
||||
///
|
||||
/// # Examples
|
||||
|
@ -497,7 +488,14 @@ impl String {
|
|||
/// See the docs for [`FromUtf8Error`] for more details on what you can do
|
||||
/// with this error.
|
||||
///
|
||||
/// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
|
||||
/// [`&str`]: ../../std/primitive.str.html
|
||||
/// [`u8`]: ../../std/primitive.u8.html
|
||||
/// [`Vec<u8>`]: ../../std/vec/struct.Vec.html
|
||||
/// [`str::from_utf8`]: ../../std/str/fn.from_utf8.html
|
||||
/// [`as_bytes`]: struct.String.html#method.as_bytes
|
||||
/// [`FromUtf8Error`]: struct.FromUtf8Error.html
|
||||
/// [`Err`]: ../../stdresult/enum.Result.html#variant.Err
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
|
||||
|
@ -594,9 +592,11 @@ impl String {
|
|||
Cow::Owned(res)
|
||||
}
|
||||
|
||||
/// Decode a UTF-16 encoded vector `v` into a `String`, returning `Err`
|
||||
/// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`]
|
||||
/// if `v` contains any invalid data.
|
||||
///
|
||||
/// [`Err`]: ../../std/result/enum.Result.htlm#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
|
@ -618,7 +618,7 @@ impl String {
|
|||
decode_utf16(v.iter().cloned()).collect::<Result<_, _>>().map_err(|_| FromUtf16Error(()))
|
||||
}
|
||||
|
||||
/// Decode a UTF-16 encoded vector `v` into a string, replacing
|
||||
/// Decode a UTF-16 encoded slice `v` into a `String`, replacing
|
||||
/// invalid data with the replacement character (U+FFFD).
|
||||
///
|
||||
/// # Examples
|
||||
|
@ -800,11 +800,12 @@ impl String {
|
|||
/// If you do not want this "at least" behavior, see the [`reserve_exact`]
|
||||
/// method.
|
||||
///
|
||||
/// [`reserve_exact`]: #method.reserve_exact
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the new capacity overflows `usize`.
|
||||
/// Panics if the new capacity overflows [`usize`].
|
||||
///
|
||||
/// [`reserve_exact`]: struct.String.html#method.reserve_exact
|
||||
/// [`usize`]: ../../std/primitive.usize.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -909,7 +910,9 @@ impl String {
|
|||
self.vec.shrink_to_fit()
|
||||
}
|
||||
|
||||
/// Appends the given `char` to the end of this `String`.
|
||||
/// Appends the given [`char`] to the end of this `String`.
|
||||
///
|
||||
/// [`char`]: ../../std/primitive.char.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -990,7 +993,9 @@ impl String {
|
|||
|
||||
/// Removes the last character from the string buffer and returns it.
|
||||
///
|
||||
/// Returns `None` if this `String` is empty.
|
||||
/// Returns [`None`] if this `String` is empty.
|
||||
///
|
||||
/// [`None`]: ../../std/option/enum.Option.html#variant.None
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -1019,7 +1024,7 @@ impl String {
|
|||
Some(ch)
|
||||
}
|
||||
|
||||
/// Removes a `char` from this `String` at a byte position and returns it.
|
||||
/// Removes a [`char`] from this `String` at a byte position and returns it.
|
||||
///
|
||||
/// This is an `O(n)` operation, as it requires copying every element in the
|
||||
/// buffer.
|
||||
|
@ -1389,7 +1394,7 @@ impl String {
|
|||
/// replaces with the given string, and yields the removed chars.
|
||||
/// The given string doesn’t need to be the same length as the range.
|
||||
///
|
||||
/// Note: The element range is removed when the `Splice` is dropped,
|
||||
/// Note: The element range is removed when the [`Splice`] is dropped,
|
||||
/// even if the iterator is not consumed until the end.
|
||||
///
|
||||
/// # Panics
|
||||
|
@ -1398,6 +1403,7 @@ impl String {
|
|||
/// boundary, or if they're out of bounds.
|
||||
///
|
||||
/// [`char`]: ../../std/primitive.char.html
|
||||
/// [`Splice`]: ../../std/string/struct.Splice.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -1450,10 +1456,13 @@ impl String {
|
|||
}
|
||||
}
|
||||
|
||||
/// Converts this `String` into a `Box<str>`.
|
||||
/// Converts this `String` into a [`Box`]`<`[`str`]`>`.
|
||||
///
|
||||
/// This will drop any excess capacity.
|
||||
///
|
||||
/// [`Box`]: ../../std/boxed/struct.Box.html
|
||||
/// [`str`]: ../../std/primitive.str.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
|
|
|
@ -93,29 +93,7 @@ fn main() {
|
|||
.env("AR", &ar)
|
||||
.env("RANLIB", format!("{} s", ar.display()));
|
||||
|
||||
if target.contains("windows") {
|
||||
// A bit of history here, this used to be --enable-lazy-lock added in
|
||||
// #14006 which was filed with jemalloc in jemalloc/jemalloc#83 which
|
||||
// was also reported to MinGW:
|
||||
//
|
||||
// http://sourceforge.net/p/mingw-w64/bugs/395/
|
||||
//
|
||||
// When updating jemalloc to 4.0, however, it was found that binaries
|
||||
// would exit with the status code STATUS_RESOURCE_NOT_OWNED indicating
|
||||
// that a thread was unlocking a mutex it never locked. Disabling this
|
||||
// "lazy lock" option seems to fix the issue, but it was enabled by
|
||||
// default for MinGW targets in 13473c7 for jemalloc.
|
||||
//
|
||||
// As a result of all that, force disabling lazy lock on Windows, and
|
||||
// after reading some code it at least *appears* that the initialization
|
||||
// of mutexes is otherwise ok in jemalloc, so shouldn't cause problems
|
||||
// hopefully...
|
||||
//
|
||||
// tl;dr: make windows behave like other platforms by disabling lazy
|
||||
// locking, but requires passing an option due to a historical
|
||||
// default with jemalloc.
|
||||
cmd.arg("--disable-lazy-lock");
|
||||
} else if target.contains("ios") {
|
||||
if target.contains("ios") {
|
||||
cmd.arg("--disable-tls");
|
||||
} else if target.contains("android") {
|
||||
// We force android to have prefixed symbols because apparently
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit 5e49856003f33aa5781a0edca148be21025e18e7
|
||||
Subproject commit 6b9281d2b2f0ebb94838814b1e8ace2de4b7035b
|
|
@ -89,7 +89,7 @@ macro_rules! __impl_slice_eq2 {
|
|||
}
|
||||
}
|
||||
|
||||
// macro for implementing n-ary tuple functions and operations
|
||||
// macro for implementing n-element array functions and operations
|
||||
macro_rules! array_impls {
|
||||
($($N:expr)+) => {
|
||||
$(
|
||||
|
|
|
@ -605,7 +605,7 @@ pub trait Iterator {
|
|||
/// closure returns [`None`], it will try again, and call the closure on the
|
||||
/// next element, seeing if it will return [`Some`].
|
||||
///
|
||||
/// Why `filter_map` and not just [`filter`].[`map`]? The key is in this
|
||||
/// Why `filter_map` and not just [`filter`] and [`map`]? The key is in this
|
||||
/// part:
|
||||
///
|
||||
/// [`filter`]: #method.filter
|
||||
|
|
|
@ -244,9 +244,12 @@ use fmt;
|
|||
use iter::{FromIterator, FusedIterator, TrustedLen};
|
||||
use ops;
|
||||
|
||||
/// `Result` is a type that represents either success (`Ok`) or failure (`Err`).
|
||||
/// `Result` is a type that represents either success ([`Ok`]) or failure ([`Err`]).
|
||||
///
|
||||
/// See the [`std::result`](index.html) module documentation for details.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
|
||||
#[must_use]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
|
@ -269,7 +272,9 @@ impl<T, E> Result<T, E> {
|
|||
// Querying the contained values
|
||||
/////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Returns `true` if the result is `Ok`.
|
||||
/// Returns `true` if the result is [`Ok`].
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -291,7 +296,9 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the result is `Err`.
|
||||
/// Returns `true` if the result is [`Err`].
|
||||
///
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -433,10 +440,13 @@ impl<T, E> Result<T, E> {
|
|||
/////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Maps a `Result<T, E>` to `Result<U, E>` by applying a function to a
|
||||
/// contained `Ok` value, leaving an `Err` value untouched.
|
||||
/// contained [`Ok`] value, leaving an [`Err`] value untouched.
|
||||
///
|
||||
/// This function can be used to compose the results of two functions.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Print the numbers on each line of a string multiplied by two.
|
||||
|
@ -461,11 +471,14 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
|
||||
/// Maps a `Result<T, E>` to `Result<T, F>` by applying a function to a
|
||||
/// contained `Err` value, leaving an `Ok` value untouched.
|
||||
/// contained [`Err`] value, leaving an [`Ok`] value untouched.
|
||||
///
|
||||
/// This function can be used to pass through a successful result while handling
|
||||
/// an error.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
|
@ -546,7 +559,10 @@ impl<T, E> Result<T, E> {
|
|||
// Boolean operations on the values, eager and lazy
|
||||
/////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Returns `res` if the result is `Ok`, otherwise returns the `Err` value of `self`.
|
||||
/// Returns `res` if the result is [`Ok`], otherwise returns the [`Err`] value of `self`.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -578,7 +594,10 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Calls `op` if the result is `Ok`, otherwise returns the `Err` value of `self`.
|
||||
/// Calls `op` if the result is [`Ok`], otherwise returns the [`Err`] value of `self`.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// This function can be used for control flow based on `Result` values.
|
||||
///
|
||||
|
@ -604,7 +623,10 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns `res` if the result is `Err`, otherwise returns the `Ok` value of `self`.
|
||||
/// Returns `res` if the result is [`Err`], otherwise returns the [`Ok`] value of `self`.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -636,10 +658,13 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Calls `op` if the result is `Err`, otherwise returns the `Ok` value of `self`.
|
||||
/// Calls `op` if the result is [`Err`], otherwise returns the [`Ok`] value of `self`.
|
||||
///
|
||||
/// This function can be used for control flow based on result values.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
|
@ -662,9 +687,12 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Unwraps a result, yielding the content of an `Ok`.
|
||||
/// Unwraps a result, yielding the content of an [`Ok`].
|
||||
/// Else, it returns `optb`.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
|
@ -686,8 +714,11 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Unwraps a result, yielding the content of an `Ok`.
|
||||
/// If the value is an `Err` then it calls `op` with its value.
|
||||
/// Unwraps a result, yielding the content of an [`Ok`].
|
||||
/// If the value is an [`Err`] then it calls `op` with its value.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -710,12 +741,15 @@ impl<T, E> Result<T, E> {
|
|||
}
|
||||
|
||||
impl<T, E: fmt::Debug> Result<T, E> {
|
||||
/// Unwraps a result, yielding the content of an `Ok`.
|
||||
/// Unwraps a result, yielding the content of an [`Ok`].
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the value is an `Err`, with a panic message provided by the
|
||||
/// `Err`'s value.
|
||||
/// Panics if the value is an [`Err`], with a panic message provided by the
|
||||
/// [`Err`]'s value.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -739,12 +773,15 @@ impl<T, E: fmt::Debug> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Unwraps a result, yielding the content of an `Ok`.
|
||||
/// Unwraps a result, yielding the content of an [`Ok`].
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the value is an `Err`, with a panic message including the
|
||||
/// passed message, and the content of the `Err`.
|
||||
/// Panics if the value is an [`Err`], with a panic message including the
|
||||
/// passed message, and the content of the [`Err`].
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -765,12 +802,16 @@ impl<T, E: fmt::Debug> Result<T, E> {
|
|||
}
|
||||
|
||||
impl<T: fmt::Debug, E> Result<T, E> {
|
||||
/// Unwraps a result, yielding the content of an `Err`.
|
||||
/// Unwraps a result, yielding the content of an [`Err`].
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the value is an `Ok`, with a custom panic message provided
|
||||
/// by the `Ok`'s value.
|
||||
/// Panics if the value is an [`Ok`], with a custom panic message provided
|
||||
/// by the [`Ok`]'s value.
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -792,12 +833,15 @@ impl<T: fmt::Debug, E> Result<T, E> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Unwraps a result, yielding the content of an `Err`.
|
||||
/// Unwraps a result, yielding the content of an [`Err`].
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the value is an `Ok`, with a panic message including the
|
||||
/// passed message, and the content of the `Ok`.
|
||||
/// Panics if the value is an [`Ok`], with a panic message including the
|
||||
/// passed message, and the content of the [`Ok`].
|
||||
///
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -820,8 +864,8 @@ impl<T: fmt::Debug, E> Result<T, E> {
|
|||
impl<T: Default, E> Result<T, E> {
|
||||
/// Returns the contained value or a default
|
||||
///
|
||||
/// Consumes the `self` argument then, if `Ok`, returns the contained
|
||||
/// value, otherwise if `Err`, returns the default value for that
|
||||
/// Consumes the `self` argument then, if [`Ok`], returns the contained
|
||||
/// value, otherwise if [`Err`], returns the default value for that
|
||||
/// type.
|
||||
///
|
||||
/// # Examples
|
||||
|
@ -829,7 +873,7 @@ impl<T: Default, E> Result<T, E> {
|
|||
/// Convert a string to an integer, turning poorly-formed strings
|
||||
/// into 0 (the default value for integers). [`parse`] converts
|
||||
/// a string to any other type that implements [`FromStr`], returning an
|
||||
/// `Err` on error.
|
||||
/// [`Err`] on error.
|
||||
///
|
||||
/// ```
|
||||
/// let good_year_from_input = "1909";
|
||||
|
@ -843,6 +887,8 @@ impl<T: Default, E> Result<T, E> {
|
|||
///
|
||||
/// [`parse`]: ../../std/primitive.str.html#method.parse
|
||||
/// [`FromStr`]: ../../std/str/trait.FromStr.html
|
||||
/// [`Ok`]: enum.Result.html#variant.Ok
|
||||
/// [`Err`]: enum.Result.html#variant.Err
|
||||
#[inline]
|
||||
#[stable(feature = "result_unwrap_or_default", since = "1.16.0")]
|
||||
pub fn unwrap_or_default(self) -> T {
|
||||
|
|
|
@ -59,7 +59,7 @@ use syntax::ast::*;
|
|||
use syntax::errors;
|
||||
use syntax::ext::hygiene::{Mark, SyntaxContext};
|
||||
use syntax::ptr::P;
|
||||
use syntax::codemap::{self, respan, Spanned};
|
||||
use syntax::codemap::{self, respan, Spanned, CompilerDesugaringKind};
|
||||
use syntax::std_inject;
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax::util::small_vector::SmallVector;
|
||||
|
@ -418,12 +418,14 @@ impl<'a> LoweringContext<'a> {
|
|||
Symbol::gensym(s)
|
||||
}
|
||||
|
||||
fn allow_internal_unstable(&self, reason: &'static str, mut span: Span) -> Span {
|
||||
fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, mut span: Span)
|
||||
-> Span
|
||||
{
|
||||
let mark = Mark::fresh(Mark::root());
|
||||
mark.set_expn_info(codemap::ExpnInfo {
|
||||
call_site: span,
|
||||
callee: codemap::NameAndSpan {
|
||||
format: codemap::CompilerDesugaring(Symbol::intern(reason)),
|
||||
format: codemap::CompilerDesugaring(reason),
|
||||
span: Some(span),
|
||||
allow_internal_unstable: true,
|
||||
allow_internal_unsafe: false,
|
||||
|
@ -1802,7 +1804,8 @@ impl<'a> LoweringContext<'a> {
|
|||
let move_val_init = ["intrinsics", "move_val_init"];
|
||||
let inplace_finalize = ["ops", "InPlace", "finalize"];
|
||||
|
||||
let unstable_span = self.allow_internal_unstable("<-", e.span);
|
||||
let unstable_span =
|
||||
self.allow_internal_unstable(CompilerDesugaringKind::BackArrow, e.span);
|
||||
let make_call = |this: &mut LoweringContext, p, args| {
|
||||
let path = P(this.expr_std_path(unstable_span, p, ThinVec::new()));
|
||||
P(this.expr_call(e.span, path, args))
|
||||
|
@ -2030,12 +2033,14 @@ impl<'a> LoweringContext<'a> {
|
|||
e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e)))
|
||||
.map(|(s, e)| {
|
||||
let expr = P(self.lower_expr(&e));
|
||||
let unstable_span = self.allow_internal_unstable("...", e.span);
|
||||
let unstable_span =
|
||||
self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
|
||||
self.field(Symbol::intern(s), expr, unstable_span)
|
||||
}).collect::<P<[hir::Field]>>();
|
||||
|
||||
let is_unit = fields.is_empty();
|
||||
let unstable_span = self.allow_internal_unstable("...", e.span);
|
||||
let unstable_span =
|
||||
self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
|
||||
let struct_path =
|
||||
iter::once("ops").chain(iter::once(path))
|
||||
.collect::<Vec<_>>();
|
||||
|
@ -2384,7 +2389,8 @@ impl<'a> LoweringContext<'a> {
|
|||
// return Try::from_error(From::from(err)),
|
||||
// }
|
||||
|
||||
let unstable_span = self.allow_internal_unstable("?", e.span);
|
||||
let unstable_span =
|
||||
self.allow_internal_unstable(CompilerDesugaringKind::QuestionMark, e.span);
|
||||
|
||||
// Try::into_result(<expr>)
|
||||
let discr = {
|
||||
|
|
|
@ -42,6 +42,7 @@ use syntax::ptr::P;
|
|||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax::tokenstream::TokenStream;
|
||||
use syntax::util::ThinVec;
|
||||
use ty::AdtKind;
|
||||
|
||||
use rustc_data_structures::indexed_vec;
|
||||
|
||||
|
@ -1796,6 +1797,15 @@ impl Item_ {
|
|||
ItemDefaultImpl(..) => "item",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn adt_kind(&self) -> Option<AdtKind> {
|
||||
match *self {
|
||||
ItemStruct(..) => Some(AdtKind::Struct),
|
||||
ItemUnion(..) => Some(AdtKind::Union),
|
||||
ItemEnum(..) => Some(AdtKind::Enum),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A reference from an trait to one of its associated items. This
|
||||
|
|
|
@ -899,9 +899,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
infer::LateBoundRegion(_, br, infer::HigherRankedType) => {
|
||||
format!(" for lifetime parameter {}in generic type", br_string(br))
|
||||
}
|
||||
infer::LateBoundRegion(_, br, infer::AssocTypeProjection(type_name)) => {
|
||||
infer::LateBoundRegion(_, br, infer::AssocTypeProjection(def_id)) => {
|
||||
format!(" for lifetime parameter {}in trait containing associated type `{}`",
|
||||
br_string(br), type_name)
|
||||
br_string(br), self.tcx.associated_item(def_id).name)
|
||||
}
|
||||
infer::EarlyBoundRegion(_, name) => {
|
||||
format!(" for lifetime parameter `{}`",
|
||||
|
|
|
@ -274,7 +274,7 @@ pub enum LateBoundRegionConversionTime {
|
|||
HigherRankedType,
|
||||
|
||||
/// when projecting an associated type
|
||||
AssocTypeProjection(ast::Name), // FIXME(tschottdorf): should contain DefId, not Name
|
||||
AssocTypeProjection(DefId),
|
||||
}
|
||||
|
||||
/// Reasons to create a region inference variable
|
||||
|
|
|
@ -121,10 +121,10 @@ pub enum FindLintError {
|
|||
|
||||
pub enum CheckLintNameResult<'a> {
|
||||
Ok(&'a [LintId]),
|
||||
// Lint doesn't exist
|
||||
/// Lint doesn't exist
|
||||
NoLint,
|
||||
// The lint is either renamed or removed. This is the warning
|
||||
// message.
|
||||
/// The lint is either renamed or removed. This is the warning
|
||||
/// message.
|
||||
Warning(String),
|
||||
}
|
||||
|
||||
|
@ -253,7 +253,7 @@ impl LintStore {
|
|||
}
|
||||
}
|
||||
|
||||
// Checks the validity of lint names derived from the command line
|
||||
/// Checks the validity of lint names derived from the command line
|
||||
pub fn check_lint_name_cmdline(&self,
|
||||
sess: &Session,
|
||||
lint_name: &str,
|
||||
|
|
|
@ -1112,8 +1112,19 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
ObligationCauseCode::StructInitializerSized => {
|
||||
err.note("structs must have a statically known size to be initialized");
|
||||
}
|
||||
ObligationCauseCode::FieldSized => {
|
||||
err.note("only the last field of a struct may have a dynamically sized type");
|
||||
ObligationCauseCode::FieldSized(ref item) => {
|
||||
match *item {
|
||||
AdtKind::Struct => {
|
||||
err.note("only the last field of a struct may have a dynamically \
|
||||
sized type");
|
||||
}
|
||||
AdtKind::Union => {
|
||||
err.note("no field of a union may have a dynamically sized type");
|
||||
}
|
||||
AdtKind::Enum => {
|
||||
err.note("no field of an enum variant may have a dynamically sized type");
|
||||
}
|
||||
}
|
||||
}
|
||||
ObligationCauseCode::ConstSized => {
|
||||
err.note("constant expressions must have a statically known size");
|
||||
|
@ -1155,8 +1166,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
fn suggest_new_overflow_limit(&self, err: &mut DiagnosticBuilder) {
|
||||
let current_limit = self.tcx.sess.recursion_limit.get();
|
||||
let suggested_limit = current_limit * 2;
|
||||
err.help(&format!(
|
||||
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
|
||||
err.help(&format!("consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
|
||||
suggested_limit));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ use hir::def_id::DefId;
|
|||
use middle::region::RegionMaps;
|
||||
use middle::free_region::FreeRegionMap;
|
||||
use ty::subst::Substs;
|
||||
use ty::{self, Ty, TyCtxt, TypeFoldable, ToPredicate};
|
||||
use ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable, ToPredicate};
|
||||
use ty::error::{ExpectedFound, TypeError};
|
||||
use infer::{InferCtxt};
|
||||
|
||||
|
@ -133,7 +133,7 @@ pub enum ObligationCauseCode<'tcx> {
|
|||
RepeatVec,
|
||||
|
||||
/// Types of fields (other than the last) in a struct must be sized.
|
||||
FieldSized,
|
||||
FieldSized(AdtKind),
|
||||
|
||||
/// Constant expressions must be sized.
|
||||
ConstSized,
|
||||
|
|
|
@ -207,7 +207,7 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
|
|||
super::ReturnType(id) => Some(super::ReturnType(id)),
|
||||
super::SizedReturnType => Some(super::SizedReturnType),
|
||||
super::RepeatVec => Some(super::RepeatVec),
|
||||
super::FieldSized => Some(super::FieldSized),
|
||||
super::FieldSized(item) => Some(super::FieldSized(item)),
|
||||
super::ConstSized => Some(super::ConstSized),
|
||||
super::SharedStatic => Some(super::SharedStatic),
|
||||
super::BuiltinDerivedObligation(ref cause) => {
|
||||
|
@ -527,7 +527,7 @@ impl<'tcx> TypeFoldable<'tcx> for traits::ObligationCauseCode<'tcx> {
|
|||
super::SizedReturnType |
|
||||
super::ReturnNoExpression |
|
||||
super::RepeatVec |
|
||||
super::FieldSized |
|
||||
super::FieldSized(_) |
|
||||
super::ConstSized |
|
||||
super::SharedStatic |
|
||||
super::BlockTailExpression(_) |
|
||||
|
@ -575,7 +575,7 @@ impl<'tcx> TypeFoldable<'tcx> for traits::ObligationCauseCode<'tcx> {
|
|||
super::SizedReturnType |
|
||||
super::ReturnNoExpression |
|
||||
super::RepeatVec |
|
||||
super::FieldSized |
|
||||
super::FieldSized(_) |
|
||||
super::ConstSized |
|
||||
super::SharedStatic |
|
||||
super::BlockTailExpression(_) |
|
||||
|
|
|
@ -8,6 +8,8 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#![deny(warnings)]
|
||||
|
||||
#![feature(rustc_private)]
|
||||
|
||||
extern crate rustc;
|
||||
|
@ -22,61 +24,51 @@ pub static ALLOCATOR_METHODS: &[AllocatorMethod] = &[
|
|||
name: "alloc",
|
||||
inputs: &[AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultPtr,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "oom",
|
||||
inputs: &[AllocatorTy::AllocErr],
|
||||
output: AllocatorTy::Bang,
|
||||
is_unsafe: false,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "dealloc",
|
||||
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout],
|
||||
output: AllocatorTy::Unit,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "usable_size",
|
||||
inputs: &[AllocatorTy::LayoutRef],
|
||||
output: AllocatorTy::UsizePair,
|
||||
is_unsafe: false,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "realloc",
|
||||
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultPtr,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "alloc_zeroed",
|
||||
inputs: &[AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultPtr,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "alloc_excess",
|
||||
inputs: &[AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultExcess,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "realloc_excess",
|
||||
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultExcess,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "grow_in_place",
|
||||
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultUnit,
|
||||
is_unsafe: true,
|
||||
},
|
||||
AllocatorMethod {
|
||||
name: "shrink_in_place",
|
||||
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
|
||||
output: AllocatorTy::ResultUnit,
|
||||
is_unsafe: true,
|
||||
},
|
||||
];
|
||||
|
||||
|
@ -84,7 +76,6 @@ pub struct AllocatorMethod {
|
|||
pub name: &'static str,
|
||||
pub inputs: &'static [AllocatorTy],
|
||||
pub output: AllocatorTy,
|
||||
pub is_unsafe: bool,
|
||||
}
|
||||
|
||||
pub enum AllocatorTy {
|
||||
|
|
|
@ -12,9 +12,8 @@
|
|||
//!
|
||||
//! A simple wrapper over the platform's dynamic library facilities
|
||||
|
||||
use std::env;
|
||||
use std::ffi::{CString, OsString};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::ffi::CString;
|
||||
use std::path::Path;
|
||||
|
||||
pub struct DynamicLibrary {
|
||||
handle: *mut u8
|
||||
|
@ -43,24 +42,6 @@ impl DynamicLibrary {
|
|||
}
|
||||
}
|
||||
|
||||
/// Prepends a path to this process's search path for dynamic libraries
|
||||
pub fn prepend_search_path(path: &Path) {
|
||||
let mut search_path = DynamicLibrary::search_path();
|
||||
search_path.insert(0, path.to_path_buf());
|
||||
env::set_var(DynamicLibrary::envvar(), &DynamicLibrary::create_path(&search_path));
|
||||
}
|
||||
|
||||
/// From a slice of paths, create a new vector which is suitable to be an
|
||||
/// environment variable for this platforms dylib search path.
|
||||
pub fn create_path(path: &[PathBuf]) -> OsString {
|
||||
let mut newvar = OsString::new();
|
||||
for (i, path) in path.iter().enumerate() {
|
||||
if i > 0 { newvar.push(DynamicLibrary::separator()); }
|
||||
newvar.push(path);
|
||||
}
|
||||
return newvar;
|
||||
}
|
||||
|
||||
/// Returns the environment variable for this process's dynamic library
|
||||
/// search path
|
||||
pub fn envvar() -> &'static str {
|
||||
|
@ -75,19 +56,6 @@ impl DynamicLibrary {
|
|||
}
|
||||
}
|
||||
|
||||
fn separator() -> &'static str {
|
||||
if cfg!(windows) { ";" } else { ":" }
|
||||
}
|
||||
|
||||
/// Returns the current search path for dynamic libraries being used by this
|
||||
/// process
|
||||
pub fn search_path() -> Vec<PathBuf> {
|
||||
match env::var_os(DynamicLibrary::envvar()) {
|
||||
Some(var) => env::split_paths(&var).collect(),
|
||||
None => Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Accesses the value at the symbol of the dynamic library.
|
||||
pub unsafe fn symbol<T>(&self, symbol: &str) -> Result<*mut T, String> {
|
||||
// This function should have a lifetime constraint of 'a on
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use {LinkerFlavor, PanicStrategy};
|
||||
use LinkerFlavor;
|
||||
use target::{LinkArgs, TargetOptions};
|
||||
use std::default::Default;
|
||||
|
||||
|
@ -37,7 +37,6 @@ pub fn opts() -> TargetOptions {
|
|||
target_family: None,
|
||||
linker_is_gnu: true,
|
||||
has_elf_tls: true,
|
||||
panic_strategy: PanicStrategy::Abort,
|
||||
.. Default::default()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -95,15 +95,6 @@ impl TempDir {
|
|||
self.path.as_ref().unwrap()
|
||||
}
|
||||
|
||||
/// Close and remove the temporary directory
|
||||
///
|
||||
/// Although `TempDir` removes the directory on drop, in the destructor
|
||||
/// any errors are ignored. To detect errors cleaning up the temporary
|
||||
/// directory, call `close` instead.
|
||||
pub fn close(mut self) -> io::Result<()> {
|
||||
self.cleanup_dir()
|
||||
}
|
||||
|
||||
fn cleanup_dir(&mut self) -> io::Result<()> {
|
||||
match self.path {
|
||||
Some(ref p) => fs::remove_dir_all(p),
|
||||
|
|
|
@ -113,37 +113,6 @@ pub fn gather_move_from_expr<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
|||
gather_move(bccx, move_data, move_error_collector, move_info);
|
||||
}
|
||||
|
||||
pub fn gather_match_variant<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
move_data: &MoveData<'tcx>,
|
||||
_move_error_collector: &mut MoveErrorCollector<'tcx>,
|
||||
move_pat: &hir::Pat,
|
||||
cmt: mc::cmt<'tcx>,
|
||||
mode: euv::MatchMode) {
|
||||
let tcx = bccx.tcx;
|
||||
debug!("gather_match_variant(move_pat={}, cmt={:?}, mode={:?})",
|
||||
move_pat.id, cmt, mode);
|
||||
|
||||
let opt_lp = opt_loan_path(&cmt);
|
||||
match opt_lp {
|
||||
Some(lp) => {
|
||||
match lp.kind {
|
||||
LpDowncast(ref base_lp, _) =>
|
||||
move_data.add_variant_match(
|
||||
tcx, lp.clone(), move_pat.id, base_lp.clone(), mode),
|
||||
_ => bug!("should only call gather_match_variant \
|
||||
for cat_downcast cmt"),
|
||||
}
|
||||
}
|
||||
None => {
|
||||
// We get None when input to match is non-path (e.g.
|
||||
// temporary result like a function call). Since no
|
||||
// loan-path is being matched, no need to record a
|
||||
// downcast.
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn gather_move_from_pat<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
move_data: &MoveData<'tcx>,
|
||||
move_error_collector: &mut MoveErrorCollector<'tcx>,
|
||||
|
|
|
@ -93,12 +93,6 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> {
|
|||
matched_pat,
|
||||
cmt,
|
||||
mode);
|
||||
|
||||
if let Categorization::Downcast(..) = cmt.cat {
|
||||
gather_moves::gather_match_variant(
|
||||
self.bccx, &self.move_data, &mut self.move_error_collector,
|
||||
matched_pat, cmt, mode);
|
||||
}
|
||||
}
|
||||
|
||||
fn consume_pat(&mut self,
|
||||
|
|
|
@ -713,15 +713,6 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
|||
err.emit();
|
||||
}
|
||||
|
||||
pub fn span_err(&self, s: Span, m: &str) {
|
||||
self.tcx.sess.span_err(s, m);
|
||||
}
|
||||
|
||||
pub fn struct_span_err<S: Into<MultiSpan>>(&self, s: S, m: &str)
|
||||
-> DiagnosticBuilder<'a> {
|
||||
self.tcx.sess.struct_span_err(s, m)
|
||||
}
|
||||
|
||||
pub fn struct_span_err_with_code<S: Into<MultiSpan>>(&self,
|
||||
s: S,
|
||||
msg: &str,
|
||||
|
|
|
@ -53,10 +53,6 @@ pub struct MoveData<'tcx> {
|
|||
/// kill move bits.
|
||||
pub path_assignments: RefCell<Vec<Assignment>>,
|
||||
|
||||
/// Enum variant matched within a pattern on some match arm, like
|
||||
/// `SomeStruct{ f: Variant1(x, y) } => ...`
|
||||
pub variant_matches: RefCell<Vec<VariantMatch>>,
|
||||
|
||||
/// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
|
||||
pub assignee_ids: RefCell<NodeSet>,
|
||||
}
|
||||
|
@ -161,21 +157,6 @@ pub struct Assignment {
|
|||
pub assignee_id: ast::NodeId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct VariantMatch {
|
||||
/// downcast to the variant.
|
||||
pub path: MovePathIndex,
|
||||
|
||||
/// path being downcast to the variant.
|
||||
pub base_path: MovePathIndex,
|
||||
|
||||
/// id where variant's pattern occurs
|
||||
pub id: ast::NodeId,
|
||||
|
||||
/// says if variant established by move (and why), by copy, or by borrow.
|
||||
pub mode: euv::MatchMode
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct MoveDataFlowOperator;
|
||||
|
||||
|
@ -215,7 +196,6 @@ impl<'a, 'tcx> MoveData<'tcx> {
|
|||
moves: RefCell::new(Vec::new()),
|
||||
path_assignments: RefCell::new(Vec::new()),
|
||||
var_assignments: RefCell::new(Vec::new()),
|
||||
variant_matches: RefCell::new(Vec::new()),
|
||||
assignee_ids: RefCell::new(NodeSet()),
|
||||
}
|
||||
}
|
||||
|
@ -485,31 +465,6 @@ impl<'a, 'tcx> MoveData<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Adds a new record for a match of `base_lp`, downcast to
|
||||
/// variant `lp`, that occurs at location `pattern_id`. (One
|
||||
/// should be able to recover the span info from the
|
||||
/// `pattern_id` and the hir_map, I think.)
|
||||
pub fn add_variant_match(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
lp: Rc<LoanPath<'tcx>>,
|
||||
pattern_id: ast::NodeId,
|
||||
base_lp: Rc<LoanPath<'tcx>>,
|
||||
mode: euv::MatchMode) {
|
||||
debug!("add_variant_match(lp={:?}, pattern_id={})",
|
||||
lp, pattern_id);
|
||||
|
||||
let path_index = self.move_path(tcx, lp.clone());
|
||||
let base_path_index = self.move_path(tcx, base_lp.clone());
|
||||
|
||||
let variant_match = VariantMatch {
|
||||
path: path_index,
|
||||
base_path: base_path_index,
|
||||
id: pattern_id,
|
||||
mode,
|
||||
};
|
||||
|
||||
self.variant_matches.borrow_mut().push(variant_match);
|
||||
}
|
||||
|
||||
/// Adds the gen/kills for the various moves and
|
||||
/// assignments into the provided data flow contexts.
|
||||
/// Moves are generated by moves and killed by assignments and
|
||||
|
|
|
@ -37,11 +37,10 @@ extern crate core; // for NonZero
|
|||
|
||||
pub use borrowck::check_crate;
|
||||
pub use borrowck::build_borrowck_dataflow_data_for_fn;
|
||||
pub use borrowck::{AnalysisData, BorrowckCtxt};
|
||||
|
||||
// NB: This module needs to be declared first so diagnostics are
|
||||
// registered before they are used.
|
||||
pub mod diagnostics;
|
||||
mod diagnostics;
|
||||
|
||||
mod borrowck;
|
||||
|
||||
|
|
|
@ -41,7 +41,7 @@ extern crate syntax_pos;
|
|||
|
||||
// NB: This module needs to be declared first so diagnostics are
|
||||
// registered before they are used.
|
||||
pub mod diagnostics;
|
||||
mod diagnostics;
|
||||
|
||||
mod eval;
|
||||
mod _match;
|
||||
|
|
|
@ -37,13 +37,6 @@ impl ConstFloat {
|
|||
self.ty.ty_to_string()
|
||||
}
|
||||
|
||||
pub fn is_nan(&self) -> bool {
|
||||
match self.ty {
|
||||
ast::FloatTy::F32 => Single::from_bits(self.bits).is_nan(),
|
||||
ast::FloatTy::F64 => Double::from_bits(self.bits).is_nan(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Compares the values if they are of the same type
|
||||
pub fn try_cmp(self, rhs: Self) -> Result<Ordering, ConstMathErr> {
|
||||
match (self.ty, rhs.ty) {
|
||||
|
|
|
@ -24,7 +24,7 @@ use std::mem;
|
|||
use std::slice;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct Blake2bCtx {
|
||||
struct Blake2bCtx {
|
||||
b: [u8; 128],
|
||||
h: [u64; 8],
|
||||
t: [u64; 2],
|
||||
|
|
|
@ -134,56 +134,10 @@ impl<Node: Idx> Dominators<Node> {
|
|||
self.dominators(node).any(|n| n == dom)
|
||||
}
|
||||
|
||||
pub fn mutual_dominator_node(&self, node1: Node, node2: Node) -> Node {
|
||||
assert!(self.is_reachable(node1),
|
||||
"node {:?} is not reachable",
|
||||
node1);
|
||||
assert!(self.is_reachable(node2),
|
||||
"node {:?} is not reachable",
|
||||
node2);
|
||||
intersect::<Node>(&self.post_order_rank,
|
||||
&self.immediate_dominators,
|
||||
node1,
|
||||
node2)
|
||||
}
|
||||
|
||||
pub fn mutual_dominator<I>(&self, iter: I) -> Option<Node>
|
||||
where I: IntoIterator<Item = Node>
|
||||
{
|
||||
let mut iter = iter.into_iter();
|
||||
iter.next()
|
||||
.map(|dom| iter.fold(dom, |dom, node| self.mutual_dominator_node(dom, node)))
|
||||
}
|
||||
|
||||
pub fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
|
||||
#[cfg(test)]
|
||||
fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
|
||||
&self.immediate_dominators
|
||||
}
|
||||
|
||||
pub fn dominator_tree(&self) -> DominatorTree<Node> {
|
||||
let elem: Vec<Node> = Vec::new();
|
||||
let mut children: IndexVec<Node, Vec<Node>> =
|
||||
IndexVec::from_elem_n(elem, self.immediate_dominators.len());
|
||||
let mut root = None;
|
||||
for (index, immed_dom) in self.immediate_dominators.iter().enumerate() {
|
||||
let node = Node::new(index);
|
||||
match *immed_dom {
|
||||
None => {
|
||||
// node not reachable
|
||||
}
|
||||
Some(immed_dom) => {
|
||||
if node == immed_dom {
|
||||
root = Some(node);
|
||||
} else {
|
||||
children[immed_dom].push(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
DominatorTree {
|
||||
root: root.unwrap(),
|
||||
children,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Iter<'dom, Node: Idx + 'dom> {
|
||||
|
@ -215,38 +169,9 @@ pub struct DominatorTree<N: Idx> {
|
|||
}
|
||||
|
||||
impl<Node: Idx> DominatorTree<Node> {
|
||||
pub fn root(&self) -> Node {
|
||||
self.root
|
||||
}
|
||||
|
||||
pub fn children(&self, node: Node) -> &[Node] {
|
||||
&self.children[node]
|
||||
}
|
||||
|
||||
pub fn iter_children_of(&self, node: Node) -> IterChildrenOf<Node> {
|
||||
IterChildrenOf {
|
||||
tree: self,
|
||||
stack: vec![node],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IterChildrenOf<'iter, Node: Idx + 'iter> {
|
||||
tree: &'iter DominatorTree<Node>,
|
||||
stack: Vec<Node>,
|
||||
}
|
||||
|
||||
impl<'iter, Node: Idx> Iterator for IterChildrenOf<'iter, Node> {
|
||||
type Item = Node;
|
||||
|
||||
fn next(&mut self) -> Option<Node> {
|
||||
if let Some(node) = self.stack.pop() {
|
||||
self.stack.extend(self.tree.children(node));
|
||||
Some(node)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
|
||||
|
|
|
@ -47,22 +47,6 @@ fn post_order_walk<G: ControlFlowGraph>(graph: &G,
|
|||
result.push(node);
|
||||
}
|
||||
|
||||
pub fn pre_order_walk<G: ControlFlowGraph>(graph: &G,
|
||||
node: G::Node,
|
||||
result: &mut Vec<G::Node>,
|
||||
visited: &mut IndexVec<G::Node, bool>) {
|
||||
if visited[node] {
|
||||
return;
|
||||
}
|
||||
visited[node] = true;
|
||||
|
||||
result.push(node);
|
||||
|
||||
for successor in graph.successors(node) {
|
||||
pre_order_walk(graph, successor, result, visited);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reverse_post_order<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
|
||||
let mut vec = post_order_from(graph, start_node);
|
||||
vec.reverse();
|
||||
|
|
|
@ -9,7 +9,6 @@
|
|||
// except according to those terms.
|
||||
|
||||
use super::super::test::TestGraph;
|
||||
use super::super::transpose::TransposedGraph;
|
||||
|
||||
use super::*;
|
||||
|
||||
|
@ -20,22 +19,3 @@ fn diamond_post_order() {
|
|||
let result = post_order_from(&graph, 0);
|
||||
assert_eq!(result, vec![3, 1, 2, 0]);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn rev_post_order_inner_loop() {
|
||||
// 0 -> 1 -> 2 -> 3 -> 5
|
||||
// ^ ^ v |
|
||||
// | 6 <- 4 |
|
||||
// +-----------------+
|
||||
let graph = TestGraph::new(0,
|
||||
&[(0, 1), (1, 2), (2, 3), (3, 5), (3, 1), (2, 4), (4, 6), (6, 2)]);
|
||||
|
||||
let rev_graph = TransposedGraph::new(&graph);
|
||||
|
||||
let result = post_order_from_to(&rev_graph, 6, Some(2));
|
||||
assert_eq!(result, vec![4, 6]);
|
||||
|
||||
let result = post_order_from_to(&rev_graph, 3, Some(1));
|
||||
assert_eq!(result, vec![4, 6, 2, 3]);
|
||||
}
|
||||
|
|
|
@ -9,13 +9,10 @@
|
|||
// except according to those terms.
|
||||
|
||||
use super::indexed_vec::Idx;
|
||||
pub use std::slice::Iter;
|
||||
|
||||
pub mod dominators;
|
||||
pub mod iterate;
|
||||
pub mod reachable;
|
||||
mod reference;
|
||||
pub mod transpose;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
|
|
@ -1,62 +0,0 @@
|
|||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! Compute reachability using a simple dataflow propagation.
|
||||
//! Store end-result in a big NxN bit matrix.
|
||||
|
||||
use super::ControlFlowGraph;
|
||||
use super::super::bitvec::BitVector;
|
||||
use super::iterate::reverse_post_order;
|
||||
use super::super::indexed_vec::{IndexVec, Idx};
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
pub fn reachable<G: ControlFlowGraph>(graph: &G) -> Reachability<G::Node> {
|
||||
let reverse_post_order = reverse_post_order(graph, graph.start_node());
|
||||
reachable_given_rpo(graph, &reverse_post_order)
|
||||
}
|
||||
|
||||
pub fn reachable_given_rpo<G: ControlFlowGraph>(graph: &G,
|
||||
reverse_post_order: &[G::Node])
|
||||
-> Reachability<G::Node> {
|
||||
let mut reachability = Reachability::new(graph);
|
||||
let mut changed = true;
|
||||
while changed {
|
||||
changed = false;
|
||||
for &node in reverse_post_order.iter().rev() {
|
||||
// every node can reach itself
|
||||
changed |= reachability.bits[node].insert(node.index());
|
||||
|
||||
// and every pred can reach everything node can reach
|
||||
for pred in graph.predecessors(node) {
|
||||
let nodes_bits = reachability.bits[node].clone();
|
||||
changed |= reachability.bits[pred].insert_all(&nodes_bits);
|
||||
}
|
||||
}
|
||||
}
|
||||
reachability
|
||||
}
|
||||
|
||||
pub struct Reachability<Node: Idx> {
|
||||
bits: IndexVec<Node, BitVector>,
|
||||
}
|
||||
|
||||
impl<Node: Idx> Reachability<Node> {
|
||||
fn new<G: ControlFlowGraph>(graph: &G) -> Self {
|
||||
let num_nodes = graph.num_nodes();
|
||||
Reachability { bits: IndexVec::from_elem_n(BitVector::new(num_nodes), num_nodes) }
|
||||
}
|
||||
|
||||
pub fn can_reach(&self, source: Node, target: Node) -> bool {
|
||||
let bit: usize = target.index();
|
||||
self.bits[source].contains(bit)
|
||||
}
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use super::super::test::TestGraph;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test1() {
|
||||
// 0 -> 1 -> 2 -> 3
|
||||
// ^ v
|
||||
// 6 <- 4 -> 5
|
||||
let graph = TestGraph::new(0, &[(0, 1), (1, 2), (2, 3), (2, 4), (4, 5), (4, 6), (6, 1)]);
|
||||
let reachable = reachable(&graph);
|
||||
assert!((0..6).all(|i| reachable.can_reach(0, i)));
|
||||
assert!((1..6).all(|i| reachable.can_reach(1, i)));
|
||||
assert!((1..6).all(|i| reachable.can_reach(2, i)));
|
||||
assert!((1..6).all(|i| reachable.can_reach(4, i)));
|
||||
assert!((1..6).all(|i| reachable.can_reach(6, i)));
|
||||
assert!(reachable.can_reach(3, 3));
|
||||
assert!(!reachable.can_reach(3, 5));
|
||||
assert!(!reachable.can_reach(5, 3));
|
||||
}
|
||||
|
||||
/// use bigger indices to cross between words in the bit set
|
||||
#[test]
|
||||
fn test2() {
|
||||
// 30 -> 31 -> 32 -> 33
|
||||
// ^ v
|
||||
// 36 <- 34 -> 35
|
||||
let graph = TestGraph::new(30,
|
||||
&[(30, 31), (31, 32), (32, 33), (32, 34), (34, 35), (34, 36),
|
||||
(36, 31)]);
|
||||
let reachable = reachable(&graph);
|
||||
assert!((30..36).all(|i| reachable.can_reach(30, i)));
|
||||
assert!((31..36).all(|i| reachable.can_reach(31, i)));
|
||||
assert!((31..36).all(|i| reachable.can_reach(32, i)));
|
||||
assert!((31..36).all(|i| reachable.can_reach(34, i)));
|
||||
assert!((31..36).all(|i| reachable.can_reach(36, i)));
|
||||
assert!(reachable.can_reach(33, 33));
|
||||
assert!(!reachable.can_reach(33, 35));
|
||||
assert!(!reachable.can_reach(35, 33));
|
||||
}
|
|
@ -1,64 +0,0 @@
|
|||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use super::*;
|
||||
|
||||
pub struct TransposedGraph<G: ControlFlowGraph> {
|
||||
base_graph: G,
|
||||
start_node: G::Node,
|
||||
}
|
||||
|
||||
impl<G: ControlFlowGraph> TransposedGraph<G> {
|
||||
pub fn new(base_graph: G) -> Self {
|
||||
let start_node = base_graph.start_node();
|
||||
Self::with_start(base_graph, start_node)
|
||||
}
|
||||
|
||||
pub fn with_start(base_graph: G, start_node: G::Node) -> Self {
|
||||
TransposedGraph {
|
||||
base_graph,
|
||||
start_node,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<G: ControlFlowGraph> ControlFlowGraph for TransposedGraph<G> {
|
||||
type Node = G::Node;
|
||||
|
||||
fn num_nodes(&self) -> usize {
|
||||
self.base_graph.num_nodes()
|
||||
}
|
||||
|
||||
fn start_node(&self) -> Self::Node {
|
||||
self.start_node
|
||||
}
|
||||
|
||||
fn predecessors<'graph>(&'graph self,
|
||||
node: Self::Node)
|
||||
-> <Self as GraphPredecessors<'graph>>::Iter {
|
||||
self.base_graph.successors(node)
|
||||
}
|
||||
|
||||
fn successors<'graph>(&'graph self,
|
||||
node: Self::Node)
|
||||
-> <Self as GraphSuccessors<'graph>>::Iter {
|
||||
self.base_graph.predecessors(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'graph, G: ControlFlowGraph> GraphPredecessors<'graph> for TransposedGraph<G> {
|
||||
type Item = G::Node;
|
||||
type Iter = <G as GraphSuccessors<'graph>>::Iter;
|
||||
}
|
||||
|
||||
impl<'graph, G: ControlFlowGraph> GraphSuccessors<'graph> for TransposedGraph<G> {
|
||||
type Item = G::Node;
|
||||
type Iter = <G as GraphPredecessors<'graph>>::Iter;
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::fmt;
|
||||
|
||||
// Provide some more formatting options for some data types (at the moment
|
||||
// that's just `{:x}` for slices of u8).
|
||||
|
||||
pub struct FmtWrap<T>(pub T);
|
||||
|
||||
impl<'a> fmt::LowerHex for FmtWrap<&'a [u8]> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
for byte in self.0.iter() {
|
||||
try!(write!(formatter, "{:02x}", byte));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lower_hex() {
|
||||
let bytes: &[u8] = &[0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef];
|
||||
assert_eq!("0123456789abcdef", &format!("{:x}", FmtWrap(bytes)));
|
||||
}
|
|
@ -107,9 +107,3 @@ impl Hasher for FxHasher {
|
|||
self.hash as u64
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash<T: Hash>(v: &T) -> u64 {
|
||||
let mut state = FxHasher::default();
|
||||
v.hash(&mut state);
|
||||
state.finish()
|
||||
}
|
||||
|
|
|
@@ -106,13 +106,6 @@ impl NodeIndex
    }
}

impl EdgeIndex {
    /// Returns unique id (unique with respect to the graph holding associated edge).
    pub fn edge_id(&self) -> usize {
        self.0
    }
}

impl<N: Debug, E: Debug> Graph<N, E> {
    pub fn new() -> Graph<N, E> {
        Graph {
@@ -201,34 +194,10 @@ impl<N: Debug, E: Debug> Graph<N, E> {
        return idx;
    }

    pub fn mut_edge_data(&mut self, idx: EdgeIndex) -> &mut E {
        &mut self.edges[idx.0].data
    }

    pub fn edge_data(&self, idx: EdgeIndex) -> &E {
        &self.edges[idx.0].data
    }

    pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
        &self.edges[idx.0]
    }

    pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
        //! Accesses the index of the first edge adjacent to `node`.
        //! This is useful if you wish to modify the graph while walking
        //! the linked list of edges.

        self.nodes[node.0].first_edge[dir.repr]
    }

    pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
        //! Accesses the next edge in a given direction.
        //! This is useful if you wish to modify the graph while walking
        //! the linked list of edges.

        self.edges[edge.0].next_edge[dir.repr]
    }

    // # Iterating over nodes, edges

    pub fn enumerated_nodes(&self) -> EnumeratedNodes<N> {
@@ -282,25 +251,6 @@ impl<N: Debug, E: Debug> Graph<N, E> {
        self.incoming_edges(target).sources()
    }

    /// A common use for graphs in our compiler is to perform
    /// fixed-point iteration. In this case, each edge represents a
    /// constraint, and the nodes themselves are associated with
    /// variables or other bitsets. This method facilitates such a
    /// computation.
    pub fn iterate_until_fixed_point<'a, F>(&'a self, mut op: F)
        where F: FnMut(usize, EdgeIndex, &'a Edge<E>) -> bool
    {
        let mut iteration = 0;
        let mut changed = true;
        while changed {
            changed = false;
            iteration += 1;
            for (edge_index, edge) in self.enumerated_edges() {
                changed |= op(iteration, edge_index, edge);
            }
        }
    }

    pub fn depth_traverse<'a>(&'a self,
                              start: NodeIndex,
                              direction: Direction)
@@ -343,35 +293,6 @@ impl<N: Debug, E: Debug> Graph<N, E> {
        assert_eq!(result.len(), self.len_nodes());
        result
    }

    /// Whether or not a node can be reached from itself.
    pub fn is_node_cyclic(&self, starting_node_index: NodeIndex) -> bool {
        // This is similar to depth traversal below, but we
        // can't use that, because depth traversal doesn't show
        // the starting node a second time.
        let mut visited = BitVector::new(self.len_nodes());
        let mut stack = vec![starting_node_index];

        while let Some(current_node_index) = stack.pop() {
            visited.insert(current_node_index.0);

            // Directionality doesn't change the answer,
            // so just use outgoing edges.
            for (_, edge) in self.outgoing_edges(current_node_index) {
                let target_node_index = edge.target();

                if target_node_index == starting_node_index {
                    return true;
                }

                if !visited.contains(target_node_index.0) {
                    stack.push(target_node_index);
                }
            }
        }

        false
    }
}
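The `is_node_cyclic` method above is an iterative depth-first walk that reports a cycle only if the walk re-reaches its own starting node. A minimal self-contained sketch of the same idea over a plain adjacency list (the `adj` slice and the `HashSet` visited set are stand-ins for the compiler's `Graph` and `BitVector`, not the types used in this hunk):

use std::collections::HashSet;

// Returns true if `start` can reach itself by following directed edges.
fn is_node_cyclic(adj: &[Vec<usize>], start: usize) -> bool {
    let mut visited = HashSet::new();
    let mut stack = vec![start];
    while let Some(node) = stack.pop() {
        visited.insert(node);
        for &target in &adj[node] {
            if target == start {
                return true; // walked back to where we began
            }
            if !visited.contains(&target) {
                stack.push(target);
            }
        }
    }
    false
}

fn main() {
    // 0 -> 1 -> 2 -> 1: node 1 lies on a cycle, node 0 does not.
    let adj = vec![vec![1], vec![2], vec![1]];
    assert!(!is_node_cyclic(&adj, 0));
    assert!(is_node_cyclic(&adj, 1));
}

As in the removed method, edge direction does not change the answer, so only outgoing edges are followed.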
    // # Iterators

@@ -479,16 +400,6 @@ pub struct DepthFirstTraversal<'g, N, E>
}

impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
    pub fn new(graph: &'g Graph<N, E>, direction: Direction) -> Self {
        let visited = BitVector::new(graph.len_nodes());
        DepthFirstTraversal {
            graph,
            stack: vec![],
            visited,
            direction,
        }
    }

    pub fn with_start_node(graph: &'g Graph<N, E>,
                           start_node: NodeIndex,
                           direction: Direction)
@@ -503,13 +414,6 @@ impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
        }
    }

    pub fn reset(&mut self, start_node: NodeIndex) {
        self.stack.truncate(0);
        self.stack.push(start_node);
        self.visited.clear();
        self.visited.insert(start_node.node_id());
    }

    fn visit(&mut self, node: NodeIndex) {
        if self.visited.insert(node.node_id()) {
            self.stack.push(node);
@@ -532,19 +436,6 @@ impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
    }
}

pub fn each_edge_index<F>(max_edge_index: EdgeIndex, mut f: F)
    where F: FnMut(EdgeIndex) -> bool
{
    let mut i = 0;
    let n = max_edge_index.0;
    while i < n {
        if !f(EdgeIndex(i)) {
            return;
        }
        i += 1;
    }
}

impl<E> Edge<E> {
    pub fn source(&self) -> NodeIndex {
        self.source
@@ -43,29 +43,6 @@ fn create_graph() -> TestGraph {
    return graph;
}

fn create_graph_with_cycle() -> TestGraph {
    let mut graph = Graph::new();

    // Create a graph with a cycle.
    //
    // A --> B <-- +
    //       |     |
    //       v     |
    //       C --> D

    let a = graph.add_node("A");
    let b = graph.add_node("B");
    let c = graph.add_node("C");
    let d = graph.add_node("D");

    graph.add_edge(a, b, "AB");
    graph.add_edge(b, c, "BC");
    graph.add_edge(c, d, "CD");
    graph.add_edge(d, b, "DB");

    return graph;
}

#[test]
fn each_node() {
    let graph = create_graph();
@@ -82,7 +59,6 @@ fn each_edge() {
    let graph = create_graph();
    let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
    graph.each_edge(|idx, edge| {
        assert_eq!(&expected[idx.0], graph.edge_data(idx));
        assert_eq!(expected[idx.0], edge.data);
        true
    });
@@ -97,7 +73,6 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph

    let mut counter = 0;
    for (edge_index, edge) in graph.incoming_edges(start_index) {
        assert!(graph.edge_data(edge_index) == &edge.data);
        assert!(counter < expected_incoming.len());
        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
               counter,
@@ -117,7 +92,6 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph

    let mut counter = 0;
    for (edge_index, edge) in graph.outgoing_edges(start_index) {
        assert!(graph.edge_data(edge_index) == &edge.data);
        assert!(counter < expected_outgoing.len());
        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
               counter,
@@ -163,58 +137,3 @@ fn each_adjacent_from_d() {
    let graph = create_graph();
    test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
}

#[test]
fn is_node_cyclic_a() {
    let graph = create_graph_with_cycle();
    assert!(!graph.is_node_cyclic(NodeIndex(0)));
}

#[test]
fn is_node_cyclic_b() {
    let graph = create_graph_with_cycle();
    assert!(graph.is_node_cyclic(NodeIndex(1)));
}

#[test]
fn nodes_in_postorder() {
    let expected = vec![
        ("A", vec!["C", "E", "D", "B", "A", "F"]),
        ("B", vec!["C", "E", "D", "B", "A", "F"]),
        ("C", vec!["C", "E", "D", "B", "A", "F"]),
        ("D", vec!["C", "E", "D", "B", "A", "F"]),
        ("E", vec!["C", "E", "D", "B", "A", "F"]),
        ("F", vec!["C", "E", "D", "B", "F", "A"])
    ];

    let graph = create_graph();

    for ((idx, node), &(node_name, ref expected))
        in graph.enumerated_nodes().zip(&expected)
    {
        assert_eq!(node.data, node_name);
        assert_eq!(expected,
                   &graph.nodes_in_postorder(OUTGOING, idx)
                       .into_iter().map(|idx| *graph.node_data(idx))
                       .collect::<Vec<&str>>());
    }

    let expected = vec![
        ("A", vec!["D", "C", "B", "A"]),
        ("B", vec!["D", "C", "B", "A"]),
        ("C", vec!["B", "D", "C", "A"]),
        ("D", vec!["C", "B", "D", "A"]),
    ];

    let graph = create_graph_with_cycle();

    for ((idx, node), &(node_name, ref expected))
        in graph.enumerated_nodes().zip(&expected)
    {
        assert_eq!(node.data, node_name);
        assert_eq!(expected,
                   &graph.nodes_in_postorder(OUTGOING, idx)
                       .into_iter().map(|idx| *graph.node_data(idx))
                       .collect::<Vec<&str>>());
    }
}
@@ -1,71 +0,0 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt;
use std::cell::Cell;

/// A write-once variable. When constructed, it is empty, and
/// can only be set once.
///
/// Ivars ensure that data that can only be initialized once. A full
/// implementation is used for concurrency and blocks on a read of an
/// unfulfilled value. This implementation is more minimal and panics
/// if you attempt to read the value before it has been set. It is also
/// not `Sync`, but may be extended in the future to be usable as a true
/// concurrency type.
///
/// The `T: Copy` bound is not strictly needed, but it is required by
/// Cell (so removing it would require using UnsafeCell), and it
/// suffices for the current purposes.
#[derive(PartialEq)]
pub struct Ivar<T: Copy> {
    data: Cell<Option<T>>,
}

impl<T: Copy> Ivar<T> {
    pub fn new() -> Ivar<T> {
        Ivar { data: Cell::new(None) }
    }

    pub fn get(&self) -> Option<T> {
        self.data.get()
    }

    pub fn fulfill(&self, value: T) {
        assert!(self.data.get().is_none(), "Value already set!");
        self.data.set(Some(value));
    }

    pub fn is_fulfilled(&self) -> bool {
        self.data.get().is_some()
    }

    pub fn unwrap(&self) -> T {
        self.get().unwrap()
    }
}

impl<T: Copy + fmt::Debug> fmt::Debug for Ivar<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.get() {
            Some(val) => write!(f, "Ivar({:?})", val),
            None => f.write_str("Ivar(<unfulfilled>)"),
        }
    }
}

impl<T: Copy> Clone for Ivar<T> {
    fn clone(&self) -> Ivar<T> {
        match self.get() {
            Some(val) => Ivar { data: Cell::new(Some(val)) },
            None => Ivar::new(),
        }
    }
}
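The doc comment above spells out the contract of the removed `Ivar`: empty on construction, set exactly once, panic on a second `fulfill` or on `unwrap` before fulfillment. A small usage sketch of that contract (hypothetical calling code, assuming `Ivar` is in scope; it is not part of the removed file):

fn main() {
    let cache: Ivar<u32> = Ivar::new();
    assert!(!cache.is_fulfilled());
    assert_eq!(cache.get(), None);   // reading via `get` before fulfillment is fine

    cache.fulfill(42);               // the one and only write
    assert!(cache.is_fulfilled());
    assert_eq!(cache.unwrap(), 42);  // `unwrap` is safe once fulfilled

    // cache.fulfill(7);             // would panic: "Value already set!"
}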
@@ -52,11 +52,9 @@ pub mod accumulate_vec;
pub mod small_vec;
pub mod base_n;
pub mod bitslice;
pub mod blake2b;
pub mod bitvec;
pub mod fmt_wrap;
pub mod blake2b;
pub mod graph;
pub mod ivar;
pub mod indexed_set;
pub mod indexed_vec;
pub mod obligation_forest;
@@ -57,11 +57,6 @@ pub trait ObligationProcessor
        where I: Clone + Iterator<Item=&'c Self::Obligation>;
}

struct SnapshotData {
    node_len: usize,
    cache_list_len: usize,
}

pub struct ObligationForest<O: ForestObligation> {
    /// The list of obligations. In between calls to
    /// `process_obligations`, this list only contains nodes in the
@@ -83,14 +78,9 @@ pub struct ObligationForest<O: ForestObligation> {
    /// A list of the obligations added in snapshots, to allow
    /// for their removal.
    cache_list: Vec<O::Predicate>,
    snapshots: Vec<SnapshotData>,
    scratch: Option<Vec<usize>>,
}

pub struct Snapshot {
    len: usize,
}

#[derive(Debug)]
struct Node<O> {
    obligation: O,
@@ -166,7 +156,6 @@ impl<O: ForestObligation> ObligationForest<O> {
    pub fn new() -> ObligationForest<O> {
        ObligationForest {
            nodes: vec![],
            snapshots: vec![],
            done_cache: FxHashSet(),
            waiting_cache: FxHashMap(),
            cache_list: vec![],
@@ -180,39 +169,6 @@ impl<O: ForestObligation> ObligationForest<O> {
        self.nodes.len()
    }

    pub fn start_snapshot(&mut self) -> Snapshot {
        self.snapshots.push(SnapshotData {
            node_len: self.nodes.len(),
            cache_list_len: self.cache_list.len()
        });
        Snapshot { len: self.snapshots.len() }
    }

    pub fn commit_snapshot(&mut self, snapshot: Snapshot) {
        assert_eq!(snapshot.len, self.snapshots.len());
        let info = self.snapshots.pop().unwrap();
        assert!(self.nodes.len() >= info.node_len);
        assert!(self.cache_list.len() >= info.cache_list_len);
    }

    pub fn rollback_snapshot(&mut self, snapshot: Snapshot) {
        // Check that we are obeying stack discipline.
        assert_eq!(snapshot.len, self.snapshots.len());
        let info = self.snapshots.pop().unwrap();

        for entry in &self.cache_list[info.cache_list_len..] {
            self.done_cache.remove(entry);
            self.waiting_cache.remove(entry);
        }

        self.nodes.truncate(info.node_len);
        self.cache_list.truncate(info.cache_list_len);
    }

    pub fn in_snapshot(&self) -> bool {
        !self.snapshots.is_empty()
    }

    /// Registers an obligation
    ///
    /// This CAN be done in a snapshot
@@ -262,7 +218,6 @@ impl<O: ForestObligation> ObligationForest<O> {
    ///
    /// This cannot be done during a snapshot.
    pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> {
        assert!(!self.in_snapshot());
        let mut errors = vec![];
        for index in 0..self.nodes.len() {
            if let NodeState::Pending = self.nodes[index].state.get() {
@@ -297,7 +252,6 @@ impl<O: ForestObligation> ObligationForest<O> {
        where P: ObligationProcessor<Obligation=O>
    {
        debug!("process_obligations(len={})", self.nodes.len());
        assert!(!self.in_snapshot()); // cannot unroll this action

        let mut errors = vec![];
        let mut stalled = true;
@@ -528,8 +482,6 @@ impl<O: ForestObligation> ObligationForest<O> {
    /// on these nodes may be present. This is done by e.g. `process_cycles`.
    #[inline(never)]
    fn compress(&mut self) -> Vec<O> {
        assert!(!self.in_snapshot()); // didn't write code to unroll this action

        let nodes_len = self.nodes.len();
        let mut node_rewrites: Vec<_> = self.scratch.take().unwrap();
        node_rewrites.extend(0..nodes_len);
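The removed snapshot methods follow a simple stack discipline: `start_snapshot` records the current lengths, `rollback_snapshot` truncates back to them and undoes the cache entries added since, and `commit_snapshot` just pops the record and keeps everything. A minimal self-contained sketch of the same pattern over a plain `Vec` (the `Log` and `SnapshotLen` names are illustrative, not the `ObligationForest` API):

struct SnapshotLen(usize);

struct Log {
    entries: Vec<String>,
    snapshots: Vec<usize>,
}

impl Log {
    fn start_snapshot(&mut self) -> SnapshotLen {
        // Remember how long `entries` was when the snapshot started.
        self.snapshots.push(self.entries.len());
        SnapshotLen(self.snapshots.len())
    }

    fn rollback_snapshot(&mut self, snapshot: SnapshotLen) {
        // Stack discipline: only the most recent snapshot may be rolled back.
        assert_eq!(snapshot.0, self.snapshots.len());
        let len = self.snapshots.pop().unwrap();
        self.entries.truncate(len);
    }

    fn commit_snapshot(&mut self, snapshot: SnapshotLen) {
        assert_eq!(snapshot.0, self.snapshots.len());
        self.snapshots.pop();
        // Entries added since the snapshot are simply kept.
    }
}

fn main() {
    let mut log = Log { entries: vec!["a".to_string()], snapshots: vec![] };
    let snap = log.start_snapshot();
    log.entries.push("b".to_string());
    log.rollback_snapshot(snap);
    assert_eq!(log.entries, vec!["a".to_string()]);
}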
@@ -275,7 +275,8 @@ impl<'tcx, K: UnifyKey> UnificationTable<K>
        self.get(id).value
    }

    pub fn unioned(&mut self, a_id: K, b_id: K) -> bool {
    #[cfg(test)]
    fn unioned(&mut self, a_id: K, b_id: K) -> bool {
        self.find(a_id) == self.find(b_id)
    }
}
@@ -102,7 +102,7 @@ use syntax::parse::{self, PResult};
use syntax_pos::{DUMMY_SP, MultiSpan};

#[cfg(test)]
pub mod test;
mod test;

pub mod driver;
pub mod pretty;
@@ -859,17 +859,17 @@ impl RustcDefaultCalls {
}

/// Returns a version string such as "0.12.0-dev".
pub fn release_str() -> Option<&'static str> {
fn release_str() -> Option<&'static str> {
    option_env!("CFG_RELEASE")
}

/// Returns the full SHA1 hash of HEAD of the Git repo from which rustc was built.
pub fn commit_hash_str() -> Option<&'static str> {
fn commit_hash_str() -> Option<&'static str> {
    option_env!("CFG_VER_HASH")
}

/// Returns the "commit date" of HEAD of the Git repo from which rustc was built as a static string.
pub fn commit_date_str() -> Option<&'static str> {
fn commit_date_str() -> Option<&'static str> {
    option_env!("CFG_VER_DATE")
}
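The three helpers above only wrap `option_env!`, which is expanded at compile time and yields `None` when the variable was not set while the compiler binary itself was being built. A tiny sketch of the same pattern (the `MY_BUILD_INFO` variable name is made up for illustration):

fn build_info() -> Option<&'static str> {
    // Evaluated when this crate is compiled; it does not read the
    // environment of the running program.
    option_env!("MY_BUILD_INFO")
}

fn main() {
    println!("build info: {}", build_info().unwrap_or("unknown"));
}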
@ -105,10 +105,6 @@ impl Diagnostic {
|
|||
self.level == Level::Cancelled
|
||||
}
|
||||
|
||||
pub fn is_fatal(&self) -> bool {
|
||||
self.level == Level::Fatal
|
||||
}
|
||||
|
||||
/// Add a span/label to be included in the resulting snippet.
|
||||
/// This is pushed onto the `MultiSpan` that was created when the
|
||||
/// diagnostic was first built. If you don't call this function at
|
||||
|
@ -278,18 +274,10 @@ impl Diagnostic {
|
|||
self.message.iter().map(|i| i.0.to_owned()).collect::<String>()
|
||||
}
|
||||
|
||||
pub fn set_message(&mut self, message: &str) {
|
||||
self.message = vec![(message.to_owned(), Style::NoStyle)];
|
||||
}
|
||||
|
||||
pub fn styled_message(&self) -> &Vec<(String, Style)> {
|
||||
&self.message
|
||||
}
|
||||
|
||||
pub fn level(&self) -> Level {
|
||||
self.level
|
||||
}
|
||||
|
||||
/// Used by a lint. Copies over all details *but* the "main
|
||||
/// message".
|
||||
pub fn copy_details_not_message(&mut self, from: &Diagnostic) {
|
||||
|
|
|
@ -183,13 +183,6 @@ impl<'a> DiagnosticBuilder<'a> {
|
|||
diagnostic: Diagnostic::new_with_code(level, code, message)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_diagnostic(mut self) -> Diagnostic {
|
||||
// annoyingly, the Drop impl means we can't actually move
|
||||
let result = self.diagnostic.clone();
|
||||
self.cancel();
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Debug for DiagnosticBuilder<'a> {
|
||||
|
|
|
@ -38,8 +38,8 @@ use std::cell::{RefCell, Cell};
|
|||
use std::{error, fmt};
|
||||
use std::rc::Rc;
|
||||
|
||||
pub mod diagnostic;
|
||||
pub mod diagnostic_builder;
|
||||
mod diagnostic;
|
||||
mod diagnostic_builder;
|
||||
pub mod emitter;
|
||||
mod snippet;
|
||||
pub mod registry;
|
||||
|
@ -111,7 +111,7 @@ impl CodeSuggestion {
|
|||
}
|
||||
|
||||
/// Returns the number of substitutions
|
||||
pub fn substitution_spans<'a>(&'a self) -> impl Iterator<Item = Span> + 'a {
|
||||
fn substitution_spans<'a>(&'a self) -> impl Iterator<Item = Span> + 'a {
|
||||
self.substitution_parts.iter().map(|sub| sub.span)
|
||||
}
|
||||
|
||||
|
@ -217,8 +217,10 @@ impl CodeSuggestion {
|
|||
if !buf.ends_with('\n') {
|
||||
push_trailing(buf, prev_line.as_ref(), &prev_hi, None);
|
||||
}
|
||||
// remove trailing newline
|
||||
buf.pop();
|
||||
// remove trailing newlines
|
||||
while buf.ends_with('\n') {
|
||||
buf.pop();
|
||||
}
|
||||
}
|
||||
bufs
|
||||
}
|
||||
|
@ -260,7 +262,7 @@ impl error::Error for ExplicitBug {
|
|||
}
|
||||
}
|
||||
|
||||
pub use diagnostic::{Diagnostic, SubDiagnostic, DiagnosticStyledString, StringPart};
|
||||
pub use diagnostic::{Diagnostic, SubDiagnostic, DiagnosticStyledString};
|
||||
pub use diagnostic_builder::DiagnosticBuilder;
|
||||
|
||||
/// A handler deals with errors; certain errors
|
||||
|
@ -489,7 +491,7 @@ impl Handler {
|
|||
self.bug(&format!("unimplemented {}", msg));
|
||||
}
|
||||
|
||||
pub fn bump_err_count(&self) {
|
||||
fn bump_err_count(&self) {
|
||||
self.panic_if_treat_err_as_bug();
|
||||
self.err_count.set(self.err_count.get() + 1);
|
||||
}
|
||||
|
@ -569,7 +571,7 @@ impl fmt::Display for Level {
|
|||
}
|
||||
|
||||
impl Level {
|
||||
pub fn color(self) -> term::color::Color {
|
||||
fn color(self) -> term::color::Color {
|
||||
match self {
|
||||
Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED,
|
||||
Warning => {
|
||||
|
@ -596,12 +598,3 @@ impl Level {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T
|
||||
where M: FnOnce() -> String
|
||||
{
|
||||
match opt {
|
||||
Some(t) => t,
|
||||
None => diag.bug(&msg()),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -62,10 +62,6 @@ impl IncrementalHashesMap {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get(&self, k: &DepNode) -> Option<&Fingerprint> {
|
||||
self.hashes.get(k)
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, k: DepNode, v: Fingerprint) {
|
||||
assert!(self.hashes.insert(k, v).is_none());
|
||||
}
|
||||
|
|
|
@ -44,4 +44,3 @@ pub use persist::save_trans_partition;
|
|||
pub use persist::save_work_products;
|
||||
pub use persist::in_incr_comp_dir;
|
||||
pub use persist::finalize_session_directory;
|
||||
pub use persist::delete_workproduct_files;
|
||||
|
|
|
@ -28,4 +28,3 @@ pub use self::load::load_dep_graph;
|
|||
pub use self::save::save_dep_graph;
|
||||
pub use self::save::save_work_products;
|
||||
pub use self::work_product::save_trans_partition;
|
||||
pub use self::work_product::delete_workproduct_files;
|
||||
|
|
|
@ -45,10 +45,10 @@ extern crate rustc_back;
|
|||
extern crate rustc_const_eval;
|
||||
extern crate syntax_pos;
|
||||
|
||||
pub use rustc::lint;
|
||||
pub use rustc::middle;
|
||||
pub use rustc::session;
|
||||
pub use rustc::util;
|
||||
use rustc::lint;
|
||||
use rustc::middle;
|
||||
use rustc::session;
|
||||
use rustc::util;
|
||||
|
||||
use session::Session;
|
||||
use lint::LintId;
|
||||
|
|
|
@ -38,9 +38,7 @@ pub use self::TypeKind::*;
|
|||
pub use self::AtomicRmwBinOp::*;
|
||||
pub use self::MetadataType::*;
|
||||
pub use self::CodeGenOptSize::*;
|
||||
pub use self::DiagnosticKind::*;
|
||||
pub use self::CallConv::*;
|
||||
pub use self::DiagnosticSeverity::*;
|
||||
pub use self::Linkage::*;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
@ -51,7 +49,7 @@ use libc::{c_uint, c_char, size_t};
|
|||
|
||||
pub mod archive_ro;
|
||||
pub mod diagnostic;
|
||||
pub mod ffi;
|
||||
mod ffi;
|
||||
|
||||
pub use ffi::*;
|
||||
|
||||
|
@ -120,7 +118,7 @@ impl FromStr for ArchiveKind {
|
|||
|
||||
#[allow(missing_copy_implementations)]
|
||||
pub enum RustString_opaque {}
|
||||
pub type RustStringRef = *mut RustString_opaque;
|
||||
type RustStringRef = *mut RustString_opaque;
|
||||
type RustStringRepr = *mut RefCell<Vec<u8>>;
|
||||
|
||||
/// Appending to a Rust string -- used by RawRustStringOstream.
|
||||
|
@ -199,8 +197,8 @@ impl Attribute {
|
|||
|
||||
// Memory-managed interface to target data.
|
||||
|
||||
pub struct TargetData {
|
||||
pub lltd: TargetDataRef,
|
||||
struct TargetData {
|
||||
lltd: TargetDataRef,
|
||||
}
|
||||
|
||||
impl Drop for TargetData {
|
||||
|
@ -211,7 +209,7 @@ impl Drop for TargetData {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn mk_target_data(string_rep: &str) -> TargetData {
|
||||
fn mk_target_data(string_rep: &str) -> TargetData {
|
||||
let string_rep = CString::new(string_rep).unwrap();
|
||||
TargetData { lltd: unsafe { LLVMCreateTargetData(string_rep.as_ptr()) } }
|
||||
}
|
||||
|
@ -272,7 +270,7 @@ pub fn get_param(llfn: ValueRef, index: c_uint) -> ValueRef {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get_params(llfn: ValueRef) -> Vec<ValueRef> {
|
||||
fn get_params(llfn: ValueRef) -> Vec<ValueRef> {
|
||||
unsafe {
|
||||
let num_params = LLVMCountParams(llfn);
|
||||
let mut params = Vec::with_capacity(num_params as usize);
|
||||
|
|
|
@ -32,7 +32,7 @@ use syntax_pos;
|
|||
|
||||
pub use rustc::middle::cstore::{NativeLibrary, NativeLibraryKind, LinkagePreference};
|
||||
pub use rustc::middle::cstore::NativeLibraryKind::*;
|
||||
pub use rustc::middle::cstore::{CrateSource, LinkMeta, LibSource};
|
||||
pub use rustc::middle::cstore::{CrateSource, LibSource};
|
||||
|
||||
pub use cstore_impl::{provide, provide_local};
|
||||
|
||||
|
@ -142,14 +142,6 @@ impl CStore {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn reset(&self) {
|
||||
self.metas.borrow_mut().clear();
|
||||
self.extern_mod_crate_map.borrow_mut().clear();
|
||||
self.used_libraries.borrow_mut().clear();
|
||||
self.used_link_args.borrow_mut().clear();
|
||||
self.statically_included_foreign_items.borrow_mut().clear();
|
||||
}
|
||||
|
||||
pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec<CrateNum> {
|
||||
let mut ordering = Vec::new();
|
||||
self.push_dependencies_in_postorder(&mut ordering, krate);
|
||||
|
|
|
@ -100,32 +100,6 @@ impl<'tcx> LazySeq<Index> {
|
|||
Some(Lazy::with_position(position as usize))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter_enumerated<'a>(&self,
|
||||
bytes: &'a [u8])
|
||||
-> impl Iterator<Item = (DefIndex, Lazy<Entry<'tcx>>)> + 'a {
|
||||
let words = &bytes_to_words(&bytes[self.position..])[..self.len];
|
||||
let lo_count = u32::from_le(words[0].get()) as usize;
|
||||
let lo = &words[1 .. lo_count + 1];
|
||||
let hi = &words[1 + lo_count ..];
|
||||
|
||||
lo.iter().map(|word| word.get()).enumerate().filter_map(|(index, pos)| {
|
||||
if pos == u32::MAX {
|
||||
None
|
||||
} else {
|
||||
let pos = u32::from_le(pos) as usize;
|
||||
Some((DefIndex::new(index), Lazy::with_position(pos)))
|
||||
}
|
||||
}).chain(hi.iter().map(|word| word.get()).enumerate().filter_map(|(index, pos)| {
|
||||
if pos == u32::MAX {
|
||||
None
|
||||
} else {
|
||||
let pos = u32::from_le(pos) as usize;
|
||||
Some((DefIndex::new(index + DefIndexAddressSpace::High.start()),
|
||||
Lazy::with_position(pos)))
|
||||
}
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(packed)]
|
||||
|
|
|
@ -47,8 +47,6 @@ extern crate rustc_data_structures;
|
|||
|
||||
mod diagnostics;
|
||||
|
||||
pub use rustc::middle;
|
||||
|
||||
mod astencode;
|
||||
mod index_builder;
|
||||
mod index;
|
||||
|
|
|
@ -306,10 +306,6 @@ impl<'a> Context<'a> {
|
|||
self.find_library_crate()
|
||||
}
|
||||
|
||||
pub fn load_library_crate(&mut self) -> Library {
|
||||
self.find_library_crate().unwrap_or_else(|| self.report_errs())
|
||||
}
|
||||
|
||||
pub fn report_errs(&mut self) -> ! {
|
||||
let add = match self.root {
|
||||
&None => String::new(),
|
||||
|
|
|
@ -14,16 +14,13 @@
|
|||
|
||||
use rustc::ty::TyCtxt;
|
||||
use rustc::mir::{self, Mir, Location};
|
||||
use rustc_data_structures::bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep.
|
||||
use rustc_data_structures::bitslice::{BitwiseOperator};
|
||||
use rustc_data_structures::indexed_set::{IdxSet};
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
|
||||
use super::MoveDataParamEnv;
|
||||
use util::elaborate_drops::DropFlagState;
|
||||
|
||||
use super::move_paths::{HasMoveData, MoveData, MoveOutIndex, MovePathIndex};
|
||||
use super::move_paths::LookupResult;
|
||||
use super::move_paths::{HasMoveData, MoveData, MovePathIndex};
|
||||
use super::{BitDenotation, BlockSets, DataflowOperator};
|
||||
|
||||
use super::drop_flag_effects_for_function_entry;
|
||||
|
@ -204,40 +201,6 @@ impl<'a, 'tcx: 'a> HasMoveData<'tcx> for DefinitelyInitializedLvals<'a, 'tcx> {
|
|||
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
|
||||
}
|
||||
|
||||
/// `MovingOutStatements` tracks the statements that perform moves out
|
||||
/// of particular l-values. More precisely, it tracks whether the
|
||||
/// *effect* of such moves (namely, the uninitialization of the
|
||||
/// l-value in question) can reach some point in the control-flow of
|
||||
/// the function, or if that effect is "killed" by some intervening
|
||||
/// operation reinitializing that l-value.
|
||||
///
|
||||
/// The resulting dataflow is a more enriched version of
|
||||
/// `MaybeUninitializedLvals`. Both structures on their own only tell
|
||||
/// you if an l-value *might* be uninitialized at a given point in the
|
||||
/// control flow. But `MovingOutStatements` also includes the added
|
||||
/// data of *which* particular statement causing the deinitialization
|
||||
/// that the borrow checker's error message may need to report.
|
||||
#[allow(dead_code)]
|
||||
pub struct MovingOutStatements<'a, 'tcx: 'a> {
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
mir: &'a Mir<'tcx>,
|
||||
mdpe: &'a MoveDataParamEnv<'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx: 'a> MovingOutStatements<'a, 'tcx> {
|
||||
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
mir: &'a Mir<'tcx>,
|
||||
mdpe: &'a MoveDataParamEnv<'tcx>)
|
||||
-> Self
|
||||
{
|
||||
MovingOutStatements { tcx: tcx, mir: mir, mdpe: mdpe }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HasMoveData<'tcx> for MovingOutStatements<'a, 'tcx> {
|
||||
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> MaybeInitializedLvals<'a, 'tcx> {
|
||||
fn update_bits(sets: &mut BlockSets<MovePathIndex>, path: MovePathIndex,
|
||||
state: DropFlagState)
|
||||
|
@ -432,121 +395,6 @@ impl<'a, 'tcx> BitDenotation for DefinitelyInitializedLvals<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> {
|
||||
type Idx = MoveOutIndex;
|
||||
fn name() -> &'static str { "moving_out" }
|
||||
fn bits_per_block(&self) -> usize {
|
||||
self.move_data().moves.len()
|
||||
}
|
||||
|
||||
fn start_block_effect(&self, _sets: &mut BlockSets<MoveOutIndex>) {
|
||||
// no move-statements have been executed prior to function
|
||||
// execution, so this method has no effect on `_sets`.
|
||||
}
|
||||
fn statement_effect(&self,
|
||||
sets: &mut BlockSets<MoveOutIndex>,
|
||||
location: Location) {
|
||||
let (tcx, mir, move_data) = (self.tcx, self.mir, self.move_data());
|
||||
let stmt = &mir[location.block].statements[location.statement_index];
|
||||
let loc_map = &move_data.loc_map;
|
||||
let path_map = &move_data.path_map;
|
||||
let rev_lookup = &move_data.rev_lookup;
|
||||
|
||||
debug!("stmt {:?} at loc {:?} moves out of move_indexes {:?}",
|
||||
stmt, location, &loc_map[location]);
|
||||
for move_index in &loc_map[location] {
|
||||
// Every path deinitialized by a *particular move*
|
||||
// has corresponding bit, "gen'ed" (i.e. set)
|
||||
// here, in dataflow vector
|
||||
zero_to_one(sets.gen_set.words_mut(), *move_index);
|
||||
}
|
||||
let bits_per_block = self.bits_per_block();
|
||||
match stmt.kind {
|
||||
mir::StatementKind::SetDiscriminant { .. } => {
|
||||
span_bug!(stmt.source_info.span, "SetDiscriminant should not exist in borrowck");
|
||||
}
|
||||
mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
|
||||
// assigning into this `lvalue` kills all
|
||||
// MoveOuts from it, and *also* all MoveOuts
|
||||
// for children and associated fragment sets.
|
||||
match rvalue.initialization_state() {
|
||||
mir::tcx::RvalueInitializationState::Shallow => {
|
||||
if let LookupResult::Exact(mpi) = rev_lookup.find(lvalue) {
|
||||
for moi in &path_map[mpi] {
|
||||
assert!(moi.index() < bits_per_block);
|
||||
sets.kill_set.add(&moi);
|
||||
}
|
||||
}
|
||||
}
|
||||
mir::tcx::RvalueInitializationState::Deep => {
|
||||
on_lookup_result_bits(tcx,
|
||||
mir,
|
||||
move_data,
|
||||
rev_lookup.find(lvalue),
|
||||
|mpi| for moi in &path_map[mpi] {
|
||||
assert!(moi.index() < bits_per_block);
|
||||
sets.kill_set.add(&moi);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
mir::StatementKind::StorageLive(_) |
|
||||
mir::StatementKind::StorageDead(_) |
|
||||
mir::StatementKind::InlineAsm { .. } |
|
||||
mir::StatementKind::EndRegion(_) |
|
||||
mir::StatementKind::Validate(..) |
|
||||
mir::StatementKind::Nop => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn terminator_effect(&self,
|
||||
sets: &mut BlockSets<MoveOutIndex>,
|
||||
location: Location)
|
||||
{
|
||||
let (mir, move_data) = (self.mir, self.move_data());
|
||||
let term = mir[location.block].terminator();
|
||||
let loc_map = &move_data.loc_map;
|
||||
debug!("terminator {:?} at loc {:?} moves out of move_indexes {:?}",
|
||||
term, location, &loc_map[location]);
|
||||
let bits_per_block = self.bits_per_block();
|
||||
for move_index in &loc_map[location] {
|
||||
assert!(move_index.index() < bits_per_block);
|
||||
zero_to_one(sets.gen_set.words_mut(), *move_index);
|
||||
}
|
||||
}
|
||||
|
||||
fn propagate_call_return(&self,
|
||||
in_out: &mut IdxSet<MoveOutIndex>,
|
||||
_call_bb: mir::BasicBlock,
|
||||
_dest_bb: mir::BasicBlock,
|
||||
dest_lval: &mir::Lvalue) {
|
||||
let move_data = self.move_data();
|
||||
let bits_per_block = self.bits_per_block();
|
||||
|
||||
let path_map = &move_data.path_map;
|
||||
on_lookup_result_bits(self.tcx,
|
||||
self.mir,
|
||||
move_data,
|
||||
move_data.rev_lookup.find(dest_lval),
|
||||
|mpi| for moi in &path_map[mpi] {
|
||||
assert!(moi.index() < bits_per_block);
|
||||
in_out.remove(&moi);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn zero_to_one(bitvec: &mut [usize], move_index: MoveOutIndex) {
|
||||
let retval = bitvec.set_bit(move_index.index());
|
||||
assert!(retval);
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> BitwiseOperator for MovingOutStatements<'a, 'tcx> {
|
||||
#[inline]
|
||||
fn join(&self, pred1: usize, pred2: usize) -> usize {
|
||||
pred1 | pred2 // moves from both preds are in scope
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> BitwiseOperator for MaybeInitializedLvals<'a, 'tcx> {
|
||||
#[inline]
|
||||
fn join(&self, pred1: usize, pred2: usize) -> usize {
|
||||
|
@ -578,13 +426,6 @@ impl<'a, 'tcx> BitwiseOperator for DefinitelyInitializedLvals<'a, 'tcx> {
|
|||
// propagating, or you start at all-ones and then use Intersect as
|
||||
// your merge when propagating.
|
||||
|
||||
impl<'a, 'tcx> DataflowOperator for MovingOutStatements<'a, 'tcx> {
|
||||
#[inline]
|
||||
fn bottom_value() -> bool {
|
||||
false // bottom = no loans in scope by default
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> DataflowOperator for MaybeInitializedLvals<'a, 'tcx> {
|
||||
#[inline]
|
||||
fn bottom_value() -> bool {
|
||||
|
|
|
@ -25,7 +25,7 @@ use std::path::PathBuf;
|
|||
use std::usize;
|
||||
|
||||
pub use self::impls::{MaybeInitializedLvals, MaybeUninitializedLvals};
|
||||
pub use self::impls::{DefinitelyInitializedLvals, MovingOutStatements};
|
||||
pub use self::impls::{DefinitelyInitializedLvals};
|
||||
pub use self::impls::borrows::{Borrows, BorrowData, BorrowIndex};
|
||||
pub(crate) use self::drop_flag_effects::*;
|
||||
|
||||
|
@ -364,8 +364,6 @@ impl<'a, 'tcx: 'a, O> DataflowAnalysis<'a, 'tcx, O> where O: BitDenotation
|
|||
DataflowResults(self.flow_state)
|
||||
}
|
||||
|
||||
pub fn flow_state(&self) -> &DataflowState<O> { &self.flow_state }
|
||||
|
||||
pub fn mir(&self) -> &'a Mir<'tcx> { self.mir }
|
||||
}
|
||||
|
||||
|
|
|
@ -43,10 +43,10 @@ extern crate rustc_const_math;
|
|||
extern crate rustc_const_eval;
|
||||
extern crate core; // for NonZero
|
||||
|
||||
pub mod diagnostics;
|
||||
mod diagnostics;
|
||||
|
||||
mod build;
|
||||
pub mod dataflow;
|
||||
mod dataflow;
|
||||
mod hair;
|
||||
mod shim;
|
||||
pub mod transform;
|
||||
|
|
|
@ -27,7 +27,6 @@ use dataflow::{do_dataflow};
|
|||
use dataflow::{MoveDataParamEnv};
|
||||
use dataflow::{BitDenotation, BlockSets, DataflowResults, DataflowResultsConsumer};
|
||||
use dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
|
||||
use dataflow::{MovingOutStatements};
|
||||
use dataflow::{Borrows, BorrowData, BorrowIndex};
|
||||
use dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex, LookupResult};
|
||||
use util::borrowck_errors::{BorrowckErrors, Origin};
|
||||
|
@ -80,9 +79,6 @@ fn borrowck_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &Mir
|
|||
let flow_uninits = do_dataflow(tcx, mir, id, &attributes, &dead_unwinds,
|
||||
MaybeUninitializedLvals::new(tcx, mir, &mdpe),
|
||||
|bd, i| &bd.move_data().move_paths[i]);
|
||||
let flow_move_outs = do_dataflow(tcx, mir, id, &attributes, &dead_unwinds,
|
||||
MovingOutStatements::new(tcx, mir, &mdpe),
|
||||
|bd, i| &bd.move_data().moves[i]);
|
||||
|
||||
let mut mbcx = MirBorrowckCtxt {
|
||||
tcx: tcx,
|
||||
|
@ -95,8 +91,7 @@ fn borrowck_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &Mir
|
|||
|
||||
let mut state = InProgress::new(flow_borrows,
|
||||
flow_inits,
|
||||
flow_uninits,
|
||||
flow_move_outs);
|
||||
flow_uninits);
|
||||
|
||||
mbcx.analyze_results(&mut state); // entry point for DataflowResultsConsumer
|
||||
});
|
||||
|
@ -119,7 +114,6 @@ pub struct InProgress<'b, 'tcx: 'b> {
|
|||
borrows: FlowInProgress<Borrows<'b, 'tcx>>,
|
||||
inits: FlowInProgress<MaybeInitializedLvals<'b, 'tcx>>,
|
||||
uninits: FlowInProgress<MaybeUninitializedLvals<'b, 'tcx>>,
|
||||
move_outs: FlowInProgress<MovingOutStatements<'b, 'tcx>>,
|
||||
}
|
||||
|
||||
struct FlowInProgress<BD> where BD: BitDenotation {
|
||||
|
@ -519,10 +513,6 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
|
|||
if let Some(mpi) = self.move_path_for_lvalue(context, move_data, lvalue) {
|
||||
if maybe_uninits.curr_state.contains(&mpi) {
|
||||
// find and report move(s) that could cause this to be uninitialized
|
||||
|
||||
// FIXME: for each move in flow_state.move_outs ...
|
||||
&flow_state.move_outs;
|
||||
|
||||
self.report_use_of_moved(context, lvalue_span);
|
||||
} else {
|
||||
// sanity check: initialized on *some* path, right?
|
||||
|
@ -1138,13 +1128,12 @@ impl ContextKind {
|
|||
impl<'b, 'tcx: 'b> InProgress<'b, 'tcx> {
|
||||
pub(super) fn new(borrows: DataflowResults<Borrows<'b, 'tcx>>,
|
||||
inits: DataflowResults<MaybeInitializedLvals<'b, 'tcx>>,
|
||||
uninits: DataflowResults<MaybeUninitializedLvals<'b, 'tcx>>,
|
||||
move_outs: DataflowResults<MovingOutStatements<'b, 'tcx>>) -> Self {
|
||||
uninits: DataflowResults<MaybeUninitializedLvals<'b, 'tcx>>)
|
||||
-> Self {
|
||||
InProgress {
|
||||
borrows: FlowInProgress::new(borrows),
|
||||
inits: FlowInProgress::new(inits),
|
||||
uninits: FlowInProgress::new(uninits),
|
||||
move_outs: FlowInProgress::new(move_outs),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -789,12 +789,6 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
|
|||
|
||||
pub struct TypeckMir;
|
||||
|
||||
impl TypeckMir {
|
||||
pub fn new() -> Self {
|
||||
TypeckMir
|
||||
}
|
||||
}
|
||||
|
||||
impl MirPass for TypeckMir {
|
||||
fn run_pass<'a, 'tcx>(&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
|
|
@ -50,10 +50,6 @@ impl<'tcx> DefUseAnalysis<'tcx> {
|
|||
&self.info[local]
|
||||
}
|
||||
|
||||
pub fn local_info_mut(&mut self, local: Local) -> &mut Info<'tcx> {
|
||||
&mut self.info[local]
|
||||
}
|
||||
|
||||
fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F)
|
||||
where F: for<'a> FnMut(&'a mut Lvalue<'tcx>,
|
||||
LvalueContext<'tcx>,
|
||||
|
|
|
@ -36,13 +36,13 @@ extern crate syntax;
|
|||
extern crate syntax_pos;
|
||||
extern crate rustc_errors as errors;
|
||||
|
||||
pub mod diagnostics;
|
||||
mod diagnostics;
|
||||
|
||||
pub mod ast_validation;
|
||||
pub mod consts;
|
||||
pub mod hir_stats;
|
||||
pub mod loops;
|
||||
pub mod mir_stats;
|
||||
mod mir_stats;
|
||||
pub mod no_asm;
|
||||
pub mod static_recursion;
|
||||
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
// completely accurate (some things might be counted twice, others missed).
|
||||
|
||||
use rustc_const_math::{ConstUsize};
|
||||
use rustc::hir::def_id::LOCAL_CRATE;
|
||||
use rustc::middle::const_val::{ConstVal};
|
||||
use rustc::mir::{AggregateKind, AssertMessage, BasicBlock, BasicBlockData};
|
||||
use rustc::mir::{Constant, Literal, Location, LocalDecl};
|
||||
|
@ -22,9 +21,7 @@ use rustc::mir::{Mir, Operand, ProjectionElem};
|
|||
use rustc::mir::{Rvalue, SourceInfo, Statement, StatementKind};
|
||||
use rustc::mir::{Terminator, TerminatorKind, VisibilityScope, VisibilityScopeData};
|
||||
use rustc::mir::visit as mir_visit;
|
||||
use rustc::mir::visit::Visitor;
|
||||
use rustc::ty::{ClosureSubsts, TyCtxt};
|
||||
use rustc::util::common::to_readable_str;
|
||||
use rustc::util::nodemap::{FxHashMap};
|
||||
|
||||
struct NodeData {
|
||||
|
@ -37,21 +34,6 @@ struct StatCollector<'a, 'tcx: 'a> {
|
|||
data: FxHashMap<&'static str, NodeData>,
|
||||
}
|
||||
|
||||
pub fn print_mir_stats<'tcx, 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, title: &str) {
|
||||
let mut collector = StatCollector {
|
||||
_tcx: tcx,
|
||||
data: FxHashMap(),
|
||||
};
|
||||
// For debugging instrumentation like this, we don't need to worry
|
||||
// about maintaining the dep graph.
|
||||
let _ignore = tcx.dep_graph.in_ignore();
|
||||
for &def_id in tcx.mir_keys(LOCAL_CRATE).iter() {
|
||||
let mir = tcx.optimized_mir(def_id);
|
||||
collector.visit_mir(&mir);
|
||||
}
|
||||
collector.print(title);
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> StatCollector<'a, 'tcx> {
|
||||
|
||||
fn record_with_size(&mut self, label: &'static str, node_size: usize) {
|
||||
|
@ -67,27 +49,6 @@ impl<'a, 'tcx> StatCollector<'a, 'tcx> {
|
|||
fn record<T>(&mut self, label: &'static str, node: &T) {
|
||||
self.record_with_size(label, ::std::mem::size_of_val(node));
|
||||
}
|
||||
|
||||
fn print(&self, title: &str) {
|
||||
let mut stats: Vec<_> = self.data.iter().collect();
|
||||
|
||||
stats.sort_by_key(|&(_, ref d)| d.count * d.size);
|
||||
|
||||
println!("\n{}\n", title);
|
||||
|
||||
println!("{:<32}{:>18}{:>14}{:>14}",
|
||||
"Name", "Accumulated Size", "Count", "Item Size");
|
||||
println!("------------------------------------------------------------------------------");
|
||||
|
||||
for (label, data) in stats {
|
||||
println!("{:<32}{:>18}{:>14}{:>14}",
|
||||
label,
|
||||
to_readable_str(data.count * data.size),
|
||||
to_readable_str(data.count),
|
||||
to_readable_str(data.size));
|
||||
}
|
||||
println!("------------------------------------------------------------------------------");
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> {
|
||||
|
|
|
@ -282,6 +282,61 @@ pub fn find(name: &str) -> Option<Intrinsic> {
|
|||
output: &::U32x4,
|
||||
definition: Named("llvm.ppc.altivec.vavguw")
|
||||
},
|
||||
"_vec_packssh" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::I16x8, &::I16x8]; &INPUTS },
|
||||
output: &::I8x16,
|
||||
definition: Named("llvm.ppc.altivec.vpkshss")
|
||||
},
|
||||
"_vec_packsuh" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::U16x8, &::U16x8]; &INPUTS },
|
||||
output: &::U8x16,
|
||||
definition: Named("llvm.ppc.altivec.vpkuhus")
|
||||
},
|
||||
"_vec_packssw" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::I32x4, &::I32x4]; &INPUTS },
|
||||
output: &::I16x8,
|
||||
definition: Named("llvm.ppc.altivec.vpkswss")
|
||||
},
|
||||
"_vec_packsuw" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::U32x4, &::U32x4]; &INPUTS },
|
||||
output: &::U16x8,
|
||||
definition: Named("llvm.ppc.altivec.vpkuwus")
|
||||
},
|
||||
"_vec_packsush" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::I16x8, &::I16x8]; &INPUTS },
|
||||
output: &::U8x16,
|
||||
definition: Named("llvm.ppc.altivec.vpkshus")
|
||||
},
|
||||
"_vec_packsusw" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::I32x4, &::I32x4]; &INPUTS },
|
||||
output: &::U16x8,
|
||||
definition: Named("llvm.ppc.altivec.vpkswus")
|
||||
},
|
||||
"_vec_packpx" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 2] = [&::I32x4, &::I32x4]; &INPUTS },
|
||||
output: &::I16x8,
|
||||
definition: Named("llvm.ppc.altivec.vpkpx")
|
||||
},
|
||||
"_vec_unpacklsb" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 1] = [&::I8x16]; &INPUTS },
|
||||
output: &::I16x8,
|
||||
definition: Named("llvm.ppc.altivec.vupklsb")
|
||||
},
|
||||
"_vec_unpacklsh" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 1] = [&::I16x8]; &INPUTS },
|
||||
output: &::I32x4,
|
||||
definition: Named("llvm.ppc.altivec.vupklsh")
|
||||
},
|
||||
"_vec_unpackhsb" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 1] = [&::I8x16]; &INPUTS },
|
||||
output: &::I16x8,
|
||||
definition: Named("llvm.ppc.altivec.vupkhsb")
|
||||
},
|
||||
"_vec_unpackhsh" => Intrinsic {
|
||||
inputs: { static INPUTS: [&'static Type; 1] = [&::I16x8]; &INPUTS },
|
||||
output: &::I32x4,
|
||||
definition: Named("llvm.ppc.altivec.vupkhsh")
|
||||
},
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -80,7 +80,7 @@ extern crate rustc_errors as errors;
|
|||
|
||||
pub use self::registry::Registry;
|
||||
|
||||
pub mod diagnostics;
|
||||
mod diagnostics;
|
||||
pub mod registry;
|
||||
pub mod load;
|
||||
pub mod build;
|
||||
|
|
|
@ -41,7 +41,7 @@ use std::cmp;
|
|||
use std::mem::replace;
|
||||
use std::rc::Rc;
|
||||
|
||||
pub mod diagnostics;
|
||||
mod diagnostics;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// Visitor used to determine if pub(restricted) is used anywhere in the crate.
|
||||
|
|
|
@ -581,6 +581,55 @@ impl<T> ::std::ops::IndexMut<Namespace> for PerNS<T> {
|
|||
}
|
||||
}
|
||||
|
||||
struct UsePlacementFinder {
|
||||
target_module: NodeId,
|
||||
span: Option<Span>,
|
||||
found_use: bool,
|
||||
}
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
|
||||
fn visit_mod(
|
||||
&mut self,
|
||||
module: &'tcx ast::Mod,
|
||||
_: Span,
|
||||
_: &[ast::Attribute],
|
||||
node_id: NodeId,
|
||||
) {
|
||||
if self.span.is_some() {
|
||||
return;
|
||||
}
|
||||
if node_id != self.target_module {
|
||||
visit::walk_mod(self, module);
|
||||
return;
|
||||
}
|
||||
// find a use statement
|
||||
for item in &module.items {
|
||||
match item.node {
|
||||
ItemKind::Use(..) => {
|
||||
// don't suggest placing a use before the prelude
|
||||
// import or other generated ones
|
||||
if item.span == DUMMY_SP {
|
||||
let mut span = item.span;
|
||||
span.hi = span.lo;
|
||||
self.span = Some(span);
|
||||
self.found_use = true;
|
||||
return;
|
||||
}
|
||||
},
|
||||
// don't place use before extern crate
|
||||
ItemKind::ExternCrate(_) => {}
|
||||
// but place them before the first other item
|
||||
_ => if self.span.map_or(true, |span| item.span < span ) {
|
||||
let mut span = item.span;
|
||||
span.hi = span.lo;
|
||||
self.span = Some(span);
|
||||
},
|
||||
}
|
||||
}
|
||||
assert!(self.span.is_some(), "a file can't have no items and emit suggestions");
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> {
|
||||
fn visit_item(&mut self, item: &'tcx Item) {
|
||||
self.resolve_item(item);
|
||||
|
@ -733,8 +782,6 @@ impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub type ErrorMessage = Option<(Span, String)>;
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
enum TypeParameters<'a, 'b> {
|
||||
NoTypeParameters,
|
||||
|
@ -866,7 +913,7 @@ pub struct ModuleData<'a> {
|
|||
expansion: Mark,
|
||||
}
|
||||
|
||||
pub type Module<'a> = &'a ModuleData<'a>;
|
||||
type Module<'a> = &'a ModuleData<'a>;
|
||||
|
||||
impl<'a> ModuleData<'a> {
|
||||
fn new(parent: Option<Module<'a>>,
|
||||
|
@ -990,6 +1037,16 @@ enum NameBindingKind<'a> {
|
|||
|
||||
struct PrivacyError<'a>(Span, Name, &'a NameBinding<'a>);
|
||||
|
||||
struct UseError<'a> {
|
||||
err: DiagnosticBuilder<'a>,
|
||||
/// Attach `use` statements for these candidates
|
||||
candidates: Vec<ImportSuggestion>,
|
||||
/// The node id of the module to place the use statements in
|
||||
node_id: NodeId,
|
||||
/// Whether the diagnostic should state that it's "better"
|
||||
better: bool,
|
||||
}
|
||||
|
||||
struct AmbiguityError<'a> {
|
||||
span: Span,
|
||||
name: Name,
|
||||
|
@ -1190,15 +1247,20 @@ pub struct Resolver<'a> {
|
|||
extern_module_map: FxHashMap<(DefId, bool /* MacrosOnly? */), Module<'a>>,
|
||||
|
||||
pub make_glob_map: bool,
|
||||
// Maps imports to the names of items actually imported (this actually maps
|
||||
// all imports, but only glob imports are actually interesting).
|
||||
/// Maps imports to the names of items actually imported (this actually maps
|
||||
/// all imports, but only glob imports are actually interesting).
|
||||
pub glob_map: GlobMap,
|
||||
|
||||
used_imports: FxHashSet<(NodeId, Namespace)>,
|
||||
pub maybe_unused_trait_imports: NodeSet,
|
||||
|
||||
/// privacy errors are delayed until the end in order to deduplicate them
|
||||
privacy_errors: Vec<PrivacyError<'a>>,
|
||||
/// ambiguity errors are delayed for deduplication
|
||||
ambiguity_errors: Vec<AmbiguityError<'a>>,
|
||||
/// `use` injections are delayed for better placement and deduplication
|
||||
use_injections: Vec<UseError<'a>>,
|
||||
|
||||
gated_errors: FxHashSet<Span>,
|
||||
disallowed_shadowing: Vec<&'a LegacyBinding<'a>>,
|
||||
|
||||
|
@ -1401,6 +1463,7 @@ impl<'a> Resolver<'a> {
|
|||
|
||||
privacy_errors: Vec::new(),
|
||||
ambiguity_errors: Vec::new(),
|
||||
use_injections: Vec::new(),
|
||||
gated_errors: FxHashSet(),
|
||||
disallowed_shadowing: Vec::new(),
|
||||
|
||||
|
@ -1465,10 +1528,11 @@ impl<'a> Resolver<'a> {
|
|||
ImportResolver { resolver: self }.finalize_imports();
|
||||
self.current_module = self.graph_root;
|
||||
self.finalize_current_module_macro_resolutions();
|
||||
|
||||
visit::walk_crate(self, krate);
|
||||
|
||||
check_unused::check_crate(self, krate);
|
||||
self.report_errors();
|
||||
self.report_errors(krate);
|
||||
self.crate_loader.postprocess(krate);
|
||||
}
|
||||
|
||||
|
@ -2413,25 +2477,20 @@ impl<'a> Resolver<'a> {
|
|||
__diagnostic_used!(E0411);
|
||||
err.code("E0411".into());
|
||||
err.span_label(span, "`Self` is only available in traits and impls");
|
||||
return err;
|
||||
return (err, Vec::new());
|
||||
}
|
||||
if is_self_value(path, ns) {
|
||||
__diagnostic_used!(E0424);
|
||||
err.code("E0424".into());
|
||||
err.span_label(span, format!("`self` value is only available in \
|
||||
methods with `self` parameter"));
|
||||
return err;
|
||||
return (err, Vec::new());
|
||||
}
|
||||
|
||||
// Try to lookup the name in more relaxed fashion for better error reporting.
|
||||
let ident = *path.last().unwrap();
|
||||
let candidates = this.lookup_import_candidates(ident.node.name, ns, is_expected);
|
||||
if !candidates.is_empty() {
|
||||
let mut module_span = this.current_module.span;
|
||||
module_span.hi = module_span.lo;
|
||||
// Report import candidates as help and proceed searching for labels.
|
||||
show_candidates(&mut err, module_span, &candidates, def.is_some());
|
||||
} else if is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
|
||||
if candidates.is_empty() && is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
|
||||
let enum_candidates =
|
||||
this.lookup_import_candidates(ident.node.name, ns, is_enum_variant);
|
||||
let mut enum_candidates = enum_candidates.iter()
|
||||
|
@ -2471,7 +2530,7 @@ impl<'a> Resolver<'a> {
|
|||
format!("Self::{}", path_str));
|
||||
}
|
||||
}
|
||||
return err;
|
||||
return (err, candidates);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2488,22 +2547,22 @@ impl<'a> Resolver<'a> {
|
|||
match (def, source) {
|
||||
(Def::Macro(..), _) => {
|
||||
err.span_label(span, format!("did you mean `{}!(...)`?", path_str));
|
||||
return err;
|
||||
return (err, candidates);
|
||||
}
|
||||
(Def::TyAlias(..), PathSource::Trait) => {
|
||||
err.span_label(span, "type aliases cannot be used for traits");
|
||||
return err;
|
||||
return (err, candidates);
|
||||
}
|
||||
(Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
|
||||
ExprKind::Field(_, ident) => {
|
||||
err.span_label(parent.span, format!("did you mean `{}::{}`?",
|
||||
path_str, ident.node));
|
||||
return err;
|
||||
return (err, candidates);
|
||||
}
|
||||
ExprKind::MethodCall(ref segment, ..) => {
|
||||
err.span_label(parent.span, format!("did you mean `{}::{}(...)`?",
|
||||
path_str, segment.identifier));
|
||||
return err;
|
||||
return (err, candidates);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
|
@ -2519,7 +2578,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
err.span_label(span, format!("did you mean `{} {{ /* fields */ }}`?",
|
||||
path_str));
|
||||
return err;
|
||||
return (err, candidates);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -2530,10 +2589,14 @@ impl<'a> Resolver<'a> {
|
|||
err.span_label(base_span, fallback_label);
|
||||
this.type_ascription_suggestion(&mut err, base_span);
|
||||
}
|
||||
err
|
||||
(err, candidates)
|
||||
};
|
||||
let report_errors = |this: &mut Self, def: Option<Def>| {
|
||||
report_errors(this, def).emit();
|
||||
let (err, candidates) = report_errors(this, def);
|
||||
let def_id = this.current_module.normal_ancestor_id;
|
||||
let node_id = this.definitions.as_local_node_id(def_id).unwrap();
|
||||
let better = def.is_some();
|
||||
this.use_injections.push(UseError { err, candidates, node_id, better });
|
||||
err_path_resolution()
|
||||
};
|
||||
|
||||
|
@ -2966,31 +3029,6 @@ impl<'a> Resolver<'a> {
|
|||
return def;
|
||||
}
|
||||
|
||||
// Calls `f` with a `Resolver` whose current lexical scope is `module`'s lexical scope,
|
||||
// i.e. the module's items and the prelude (unless the module is `#[no_implicit_prelude]`).
|
||||
// FIXME #34673: This needs testing.
|
||||
pub fn with_module_lexical_scope<T, F>(&mut self, module: Module<'a>, f: F) -> T
|
||||
where F: FnOnce(&mut Resolver<'a>) -> T,
|
||||
{
|
||||
self.with_empty_ribs(|this| {
|
||||
this.ribs[ValueNS].push(Rib::new(ModuleRibKind(module)));
|
||||
this.ribs[TypeNS].push(Rib::new(ModuleRibKind(module)));
|
||||
f(this)
|
||||
})
|
||||
}
|
||||
|
||||
fn with_empty_ribs<T, F>(&mut self, f: F) -> T
|
||||
where F: FnOnce(&mut Resolver<'a>) -> T,
|
||||
{
|
||||
let ribs = replace(&mut self.ribs, PerNS::<Vec<Rib>>::default());
|
||||
let label_ribs = replace(&mut self.label_ribs, Vec::new());
|
||||
|
||||
let result = f(self);
|
||||
self.ribs = ribs;
|
||||
self.label_ribs = label_ribs;
|
||||
result
|
||||
}
|
||||
|
||||
fn lookup_assoc_candidate<FilterFn>(&mut self,
|
||||
ident: Ident,
|
||||
ns: Namespace,
|
||||
|
@ -3458,8 +3496,9 @@ impl<'a> Resolver<'a> {
|
|||
vis.is_accessible_from(module.normal_ancestor_id, self)
|
||||
}
|
||||
|
||||
fn report_errors(&mut self) {
|
||||
fn report_errors(&mut self, krate: &Crate) {
|
||||
self.report_shadowing_errors();
|
||||
self.report_with_use_injections(krate);
|
||||
let mut reported_spans = FxHashSet();
|
||||
|
||||
for &AmbiguityError { span, name, b1, b2, lexical, legacy } in &self.ambiguity_errors {
|
||||
|
@ -3507,6 +3546,22 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn report_with_use_injections(&mut self, krate: &Crate) {
|
||||
for UseError { mut err, candidates, node_id, better } in self.use_injections.drain(..) {
|
||||
let mut finder = UsePlacementFinder {
|
||||
target_module: node_id,
|
||||
span: None,
|
||||
found_use: false,
|
||||
};
|
||||
visit::walk_crate(&mut finder, krate);
|
||||
if !candidates.is_empty() {
|
||||
let span = finder.span.expect("did not find module");
|
||||
show_candidates(&mut err, span, &candidates, better, finder.found_use);
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
}
|
||||
|
||||
fn report_shadowing_errors(&mut self) {
|
||||
for (ident, scope) in replace(&mut self.lexical_macro_resolutions, Vec::new()) {
|
||||
self.resolve_legacy_scope(scope, ident, true);
|
||||
|
@ -3697,7 +3752,8 @@ fn import_candidate_to_paths(suggestion: &ImportSuggestion) -> (Span, String, St
|
|||
fn show_candidates(err: &mut DiagnosticBuilder,
|
||||
span: Span,
|
||||
candidates: &[ImportSuggestion],
|
||||
better: bool) {
|
||||
better: bool,
|
||||
found_use: bool) {
|
||||
|
||||
// we want consistent results across executions, but candidates are produced
|
||||
// by iterating through a hash map, so make sure they are ordered:
|
||||
|
@ -3713,7 +3769,14 @@ fn show_candidates(err: &mut DiagnosticBuilder,
|
|||
let msg = format!("possible {}candidate{} into scope", better, msg_diff);
|
||||
|
||||
for candidate in &mut path_strings {
|
||||
*candidate = format!("use {};\n", candidate);
|
||||
// produce an additional newline to separate the new use statement
|
||||
// from the directly following item.
|
||||
let additional_newline = if found_use {
|
||||
""
|
||||
} else {
|
||||
"\n"
|
||||
};
|
||||
*candidate = format!("use {};\n{}", candidate, additional_newline);
|
||||
}
|
||||
|
||||
err.span_suggestions(span, &msg, path_strings);
|
||||
|
|
|
@ -51,7 +51,7 @@ use std::env;
use std::fs::File;
use std::path::{Path, PathBuf};

use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::ast::{self, NodeId, PatKind, Attribute};
use syntax::parse::lexer::comments::strip_doc_comment_decoration;
use syntax::parse::token;
use syntax::print::pprust;

@ -80,8 +80,6 @@ pub struct SaveContext<'l, 'tcx: 'l> {

#[derive(Debug)]
pub enum Data {
/// Data about a macro use.
MacroUseData(MacroRef),
RefData(Ref),
DefData(Def),
RelationData(Relation),

@ -759,11 +757,6 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
}
}

#[inline]
pub fn enclosing_scope(&self, id: NodeId) -> NodeId {
self.tcx.hir.get_enclosing_scope(id).unwrap_or(CRATE_NODE_ID)
}

fn docs_for_attrs(&self, attrs: &[Attribute]) -> String {
let mut result = String::new();
@ -10,7 +10,7 @@

//! A helper class for dealing with static archives

use std::ffi::{CString, CStr, OsString};
use std::ffi::{CString, CStr};
use std::io;
use std::mem;
use std::path::{Path, PathBuf};

@ -28,8 +28,6 @@ pub struct ArchiveConfig<'a> {
pub dst: PathBuf,
pub src: Option<PathBuf>,
pub lib_search_paths: Vec<PathBuf>,
pub ar_prog: String,
pub command_path: OsString,
}

/// Helper for adding many files to an archive with a single invocation of


@ -138,12 +138,6 @@ pub fn msvc_link_exe_cmd(_sess: &Session) -> (Command, Vec<(OsString, OsString)>
(Command::new("link.exe"), vec![])
}

pub fn get_ar_prog(sess: &Session) -> String {
sess.opts.cg.ar.clone().unwrap_or_else(|| {
sess.target.target.options.ar.clone()
})
}

fn command_path(sess: &Session) -> OsString {
// The compiler's sysroot often has some bundled tools, so add it to the
// PATH for the child.

@ -383,8 +377,6 @@ fn archive_config<'a>(sess: &'a Session,
dst: output.to_path_buf(),
src: input.map(|p| p.to_path_buf()),
lib_search_paths: archive_search_paths(sess),
ar_prog: get_ar_prog(sess),
command_path: command_path(sess),
}
}
@ -785,8 +785,6 @@ pub fn start_async_translation(sess: &Session,
crate_name,
link,
metadata,
exported_symbols,
no_builtins,
windows_subsystem,
linker_info,
no_integrated_as,

@ -1801,8 +1799,6 @@ pub struct OngoingCrateTranslation {
crate_name: Symbol,
link: LinkMeta,
metadata: EncodedMetadata,
exported_symbols: Arc<ExportedSymbols>,
no_builtins: bool,
windows_subsystem: Option<String>,
linker_info: LinkerInfo,
no_integrated_as: bool,

@ -1852,13 +1848,10 @@ impl OngoingCrateTranslation {
crate_name: self.crate_name,
link: self.link,
metadata: self.metadata,
exported_symbols: self.exported_symbols,
no_builtins: self.no_builtins,
windows_subsystem: self.windows_subsystem,
linker_info: self.linker_info,

modules: compiled_modules.modules,
metadata_module: compiled_modules.metadata_module,
allocator_module: compiled_modules.allocator_module,
};
@ -27,10 +27,9 @@ use type_::Type;
use rustc_data_structures::base_n;
use rustc::session::config::{self, NoDebugInfo, OutputFilenames};
use rustc::session::Session;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{LayoutCx, LayoutError, LayoutTyper, TyLayout};
use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
use rustc::util::nodemap::{FxHashMap, FxHashSet};

use std::ffi::{CStr, CString};
use std::cell::{Cell, RefCell};

@ -39,7 +38,6 @@ use std::iter;
use std::str;
use std::sync::Arc;
use std::marker::PhantomData;
use syntax::ast;
use syntax::symbol::InternedString;
use syntax_pos::DUMMY_SP;
use abi::Abi;

@ -124,12 +122,6 @@ pub struct LocalCrateContext<'a, 'tcx: 'a> {
/// Cache of emitted const globals (value -> global)
const_globals: RefCell<FxHashMap<ValueRef, ValueRef>>,

/// Cache of emitted const values
const_values: RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>,

/// Cache of external const values
extern_const_values: RefCell<DefIdMap<ValueRef>>,

/// Mapping from static definitions to their DefId's.
statics: RefCell<FxHashMap<ValueRef, DefId>>,

@ -144,7 +136,6 @@ pub struct LocalCrateContext<'a, 'tcx: 'a> {
used_statics: RefCell<Vec<ValueRef>>,

lltypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
type_hashcodes: RefCell<FxHashMap<Ty<'tcx>, String>>,
int_type: Type,
opaque_vec_type: Type,
str_slice_type: Type,

@ -157,9 +148,6 @@ pub struct LocalCrateContext<'a, 'tcx: 'a> {
intrinsics: RefCell<FxHashMap<&'static str, ValueRef>>,

/// Depth of the current type-of computation - used to bail out
type_of_depth: Cell<usize>,

/// A counter that is used for generating local symbol names
local_gen_sym_counter: Cell<usize>,

@ -406,13 +394,10 @@ impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
const_cstr_cache: RefCell::new(FxHashMap()),
const_unsized: RefCell::new(FxHashMap()),
const_globals: RefCell::new(FxHashMap()),
const_values: RefCell::new(FxHashMap()),
extern_const_values: RefCell::new(DefIdMap()),
statics: RefCell::new(FxHashMap()),
statics_to_rauw: RefCell::new(Vec::new()),
used_statics: RefCell::new(Vec::new()),
lltypes: RefCell::new(FxHashMap()),
type_hashcodes: RefCell::new(FxHashMap()),
int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()),
str_slice_type: Type::from_ref(ptr::null_mut()),

@ -421,7 +406,6 @@ impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
eh_unwind_resume: Cell::new(None),
rust_try_fn: Cell::new(None),
intrinsics: RefCell::new(FxHashMap()),
type_of_depth: Cell::new(0),
local_gen_sym_counter: Cell::new(0),
placeholder: PhantomData,
};

@ -545,15 +529,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local().const_globals
}

pub fn const_values<'a>(&'a self) -> &'a RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>),
ValueRef>> {
&self.local().const_values
}

pub fn extern_const_values<'a>(&'a self) -> &'a RefCell<DefIdMap<ValueRef>> {
&self.local().extern_const_values
}

pub fn statics<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, DefId>> {
&self.local().statics
}

@ -570,10 +545,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local().lltypes
}

pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, String>> {
&self.local().type_hashcodes
}

pub fn stats<'a>(&'a self) -> &'a Stats {
&self.local().stats
}

@ -582,10 +553,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
self.local().int_type
}

pub fn opaque_vec_type(&self) -> Type {
self.local().opaque_vec_type
}

pub fn str_slice_type(&self) -> Type {
self.local().str_slice_type
}

@ -602,27 +569,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local().intrinsics
}

pub fn obj_size_bound(&self) -> u64 {
self.tcx().data_layout.obj_size_bound()
}

pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! {
self.sess().fatal(
&format!("the type `{:?}` is too big for the current architecture",
obj))
}

pub fn enter_type_of(&self, ty: Ty<'tcx>) -> TypeOfDepthLock<'b, 'tcx> {
let current_depth = self.local().type_of_depth.get();
debug!("enter_type_of({:?}) at depth {:?}", ty, current_depth);
if current_depth > self.sess().recursion_limit.get() {
self.sess().fatal(
&format!("overflow representing the type `{}`", ty))
}
self.local().type_of_depth.set(current_depth + 1);
TypeOfDepthLock(self.local())
}

pub fn check_overflow(&self) -> bool {
self.shared.check_overflow
}

@ -631,12 +577,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
self.shared.use_dll_storage_attrs()
}

/// Given the def-id of some item that has no type parameters, make
/// a suitable "empty substs" for it.
pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
self.tcx().empty_substs_for_def_id(item_def_id)
}

/// Generate a new symbol name with the given prefix. This symbol name must
/// only be used for definitions with `internal` or `private` linkage.
pub fn generate_local_symbol_name(&self, prefix: &str) -> String {

@ -776,14 +716,6 @@ impl<'a, 'tcx> LayoutTyper<'tcx> for &'a CrateContext<'a, 'tcx> {
}
}

pub struct TypeOfDepthLock<'a, 'tcx: 'a>(&'a LocalCrateContext<'a, 'tcx>);

impl<'a, 'tcx> Drop for TypeOfDepthLock<'a, 'tcx> {
fn drop(&mut self) {
self.0.type_of_depth.set(self.0.type_of_depth.get() - 1);
}
}

/// Declare any llvm intrinsics that you might need
fn declare_intrinsic(ccx: &CrateContext, key: &str) -> Option<ValueRef> {
macro_rules! ifn {
@ -36,7 +36,6 @@

use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
use std::sync::Arc;

extern crate flate2;
extern crate libc;

@ -46,7 +45,7 @@ extern crate rustc_allocator;
extern crate rustc_back;
extern crate rustc_data_structures;
extern crate rustc_incremental;
pub extern crate rustc_llvm as llvm;
extern crate rustc_llvm as llvm;
extern crate rustc_platform_intrinsics as intrinsics;
extern crate rustc_const_math;
#[macro_use]

@ -78,7 +77,7 @@ pub mod back {
pub(crate) mod symbol_export;
pub(crate) mod symbol_names;
pub mod write;
pub mod rpath;
mod rpath;
}

mod diagnostics;

@ -138,8 +137,8 @@ pub struct ModuleTranslation {
/// unique amongst **all** crates. Therefore, it should contain
/// something unique to this crate (e.g., a module path) as well
/// as the crate name and disambiguator.
pub name: String,
pub symbol_name_hash: u64,
name: String,
symbol_name_hash: u64,
pub source: ModuleSource,
pub kind: ModuleKind,
}

@ -206,7 +205,7 @@ pub enum ModuleSource {

#[derive(Copy, Clone, Debug)]
pub struct ModuleLlvm {
pub llcx: llvm::ContextRef,
llcx: llvm::ContextRef,
pub llmod: llvm::ModuleRef,
}

@ -216,14 +215,11 @@ unsafe impl Sync for ModuleTranslation { }
pub struct CrateTranslation {
pub crate_name: Symbol,
pub modules: Vec<CompiledModule>,
pub metadata_module: CompiledModule,
pub allocator_module: Option<CompiledModule>,
allocator_module: Option<CompiledModule>,
pub link: rustc::middle::cstore::LinkMeta,
pub metadata: rustc::middle::cstore::EncodedMetadata,
pub exported_symbols: Arc<back::symbol_export::ExportedSymbols>,
pub no_builtins: bool,
pub windows_subsystem: Option<String>,
pub linker_info: back::linker::LinkerInfo
windows_subsystem: Option<String>,
linker_info: back::linker::LinkerInfo
}

__build_diagnostic_array! { librustc_trans, DIAGNOSTICS }
@ -237,19 +237,6 @@ impl Type {
ty!(llvm::LLVMPointerType(self.to_ref(), 0))
}

pub fn is_aggregate(&self) -> bool {
match self.kind() {
TypeKind::Struct | TypeKind::Array => true,
_ => false
}
}

pub fn is_packed(&self) -> bool {
unsafe {
llvm::LLVMIsPackedStruct(self.to_ref()) == True
}
}

pub fn element_type(&self) -> Type {
unsafe {
Type::from_ref(llvm::LLVMGetElementType(self.to_ref()))

@ -263,12 +250,6 @@ impl Type {
}
}

pub fn array_length(&self) -> usize {
unsafe {
llvm::LLVMGetArrayLength(self.to_ref()) as usize
}
}

pub fn field_types(&self) -> Vec<Type> {
unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as usize;

@ -282,10 +263,6 @@ impl Type {
}
}

pub fn return_type(&self) -> Type {
ty!(llvm::LLVMGetReturnType(self.to_ref()))
}

pub fn func_params(&self) -> Vec<Type> {
unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as usize;

@ -324,13 +301,4 @@ impl Type {
I128 => Type::i128(cx),
}
}

pub fn from_primitive(ccx: &CrateContext, p: layout::Primitive) -> Type {
match p {
layout::Int(i) => Type::from_integer(ccx, i),
layout::F32 => Type::f32(ccx),
layout::F64 => Type::f64(ccx),
layout::Pointer => bug!("It is not possible to convert Pointer directly to Type.")
}
}
}
@ -27,8 +27,6 @@ use rustc::hir;
pub use self::MethodError::*;
pub use self::CandidateSource::*;

pub use self::suggest::AllTraitsVec;

mod confirm;
pub mod probe;
mod suggest;


@ -1687,11 +1687,10 @@ impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
poly_trait_ref: ty::PolyTraitRef<'tcx>)
-> Ty<'tcx>
{
let item = self.tcx().associated_item(item_def_id);
let (trait_ref, _) =
self.replace_late_bound_regions_with_fresh_var(
span,
infer::LateBoundRegionConversionTime::AssocTypeProjection(item.name),
infer::LateBoundRegionConversionTime::AssocTypeProjection(item_def_id),
&poly_trait_ref);

self.tcx().mk_projection(item_def_id, trait_ref.substs)


@ -1805,8 +1805,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
let (outlives, _) =
self.replace_late_bound_regions_with_fresh_var(
span,
infer::AssocTypeProjection(
self.tcx.associated_item(projection_ty.item_def_id).name),
infer::AssocTypeProjection(projection_ty.item_def_id),
&outlives);

debug!("projection_bounds: outlives={:?} (3)",


@ -233,7 +233,10 @@ impl<'a, 'gcx> CheckTypeWellFormedVisitor<'a, 'gcx> {
fcx.tcx.require_lang_item(lang_items::SizedTraitLangItem),
traits::ObligationCause::new(field.span,
fcx.body_id,
traits::FieldSized));
traits::FieldSized(match item.node.adt_kind() {
Some(i) => i,
None => bug!(),
})));
}

// All field types must be well-formed.


@ -95,12 +95,11 @@ extern crate rustc_const_math;
extern crate rustc_data_structures;
extern crate rustc_errors as errors;

pub use rustc::dep_graph;
pub use rustc::hir;
pub use rustc::lint;
pub use rustc::middle;
pub use rustc::session;
pub use rustc::util;
use rustc::hir;
use rustc::lint;
use rustc::middle;
use rustc::session;
use rustc::util;

use hir::map as hir_map;
use rustc::infer::InferOk;

@ -118,7 +117,7 @@ use syntax_pos::Span;
use std::iter;
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
pub mod diagnostics;
mod diagnostics;

mod check;
mod check_unused;

@ -130,8 +129,8 @@ mod coherence;
mod variance;

pub struct TypeAndSubsts<'tcx> {
pub substs: &'tcx Substs<'tcx>,
pub ty: Ty<'tcx>,
substs: &'tcx Substs<'tcx>,
ty: Ty<'tcx>,
}

fn require_c_abi_if_variadic(tcx: TyCtxt,
@ -103,26 +103,31 @@ mod prim_bool { }
/// [`String`]: string/struct.String.html
///
/// As always, remember that a human intuition for 'character' may not map to
/// Unicode's definitions. For example, emoji symbols such as '❤️' can be more
/// than one Unicode code point; this ❤️ in particular is two:
/// Unicode's definitions. For example, despite looking similar, the 'é'
/// character is one Unicode code point while 'é' is two Unicode code points:
///
/// ```
/// let s = String::from("❤️");
/// let mut chars = "é".chars();
/// // U+00e9: 'latin small letter e with acute'
/// assert_eq!(Some('\u{00e9}'), chars.next());
/// assert_eq!(None, chars.next());
///
/// // we get two chars out of a single ❤️
/// let mut iter = s.chars();
/// assert_eq!(Some('\u{2764}'), iter.next());
/// assert_eq!(Some('\u{fe0f}'), iter.next());
/// assert_eq!(None, iter.next());
/// let mut chars = "é".chars();
/// // U+0065: 'latin small letter e'
/// assert_eq!(Some('\u{0065}'), chars.next());
/// // U+0301: 'combining acute accent'
/// assert_eq!(Some('\u{0301}'), chars.next());
/// assert_eq!(None, chars.next());
/// ```
///
/// This means it won't fit into a `char`. Trying to create a literal with
/// `let heart = '❤️';` gives an error:
/// This means that the contents of the first string above _will_ fit into a
/// `char` while the contents of the second string _will not_. Trying to create
/// a `char` literal with the contents of the second string gives an error:
///
/// ```text
/// error: character literal may only contain one codepoint: '❤
/// let heart = '❤️';
/// ^~
/// error: character literal may only contain one codepoint: 'é'
/// let c = 'é';
/// ^^^^
/// ```
///
/// Another implication of the 4-byte fixed size of a `char` is that
@ -336,7 +336,7 @@ impl Command {
panic!("return from exec without err");
}
} else {
io::Error::new(io::ErrorKind::NotFound, "")
io::Error::from_raw_os_error(syscall::ENOENT)
}
}


@ -13,11 +13,10 @@

#![stable(feature = "rust1", since = "1.0.0")]

use fs;
use net;
use os::raw;
use sys;
use io;
use sys_common::{self, AsInner, FromInner, IntoInner};
use sys_common::{AsInner, FromInner, IntoInner};
use libc;

/// Raw file descriptors.

@ -93,19 +92,6 @@ impl IntoRawFd for fs::File {
}
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AsRawFd for net::TcpStream {
fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRawFd for net::TcpListener {
fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRawFd for net::UdpSocket {
fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() }
}

#[stable(feature = "asraw_stdio", since = "1.21.0")]
impl AsRawFd for io::Stdin {
fn as_raw_fd(&self) -> RawFd { libc::STDIN_FILENO }

@ -120,44 +106,3 @@ impl AsRawFd for io::Stdout {
impl AsRawFd for io::Stderr {
fn as_raw_fd(&self) -> RawFd { libc::STDERR_FILENO }
}

#[stable(feature = "from_raw_os", since = "1.1.0")]
impl FromRawFd for net::TcpStream {
unsafe fn from_raw_fd(fd: RawFd) -> net::TcpStream {
let socket = sys::net::Socket::from_inner(fd);
net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(socket))
}
}
#[stable(feature = "from_raw_os", since = "1.1.0")]
impl FromRawFd for net::TcpListener {
unsafe fn from_raw_fd(fd: RawFd) -> net::TcpListener {
let socket = sys::net::Socket::from_inner(fd);
net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(socket))
}
}
#[stable(feature = "from_raw_os", since = "1.1.0")]
impl FromRawFd for net::UdpSocket {
unsafe fn from_raw_fd(fd: RawFd) -> net::UdpSocket {
let socket = sys::net::Socket::from_inner(fd);
net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(socket))
}
}

#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawFd for net::TcpStream {
fn into_raw_fd(self) -> RawFd {
self.into_inner().into_socket().into_inner()
}
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawFd for net::TcpListener {
fn into_raw_fd(self) -> RawFd {
self.into_inner().into_socket().into_inner()
}
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawFd for net::UdpSocket {
fn into_raw_fd(self) -> RawFd {
self.into_inner().into_socket().into_inner()
}
}
@ -30,14 +30,14 @@ use ffi::OsStr;
use fmt;
use io::{self, Initializer};
use mem;
use net::Shutdown;
use net::{self, Shutdown};
use os::unix::ffi::OsStrExt;
use os::unix::io::{RawFd, AsRawFd, FromRawFd, IntoRawFd};
use path::Path;
use time::Duration;
use sys::cvt;
use sys::{self, cvt};
use sys::net::Socket;
use sys_common::{AsInner, FromInner, IntoInner};
use sys_common::{self, AsInner, FromInner, IntoInner};

#[cfg(any(target_os = "linux", target_os = "android",
target_os = "dragonfly", target_os = "freebsd",

@ -588,6 +588,64 @@ impl IntoRawFd for UnixStream {
}
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AsRawFd for net::TcpStream {
fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AsRawFd for net::TcpListener {
fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AsRawFd for net::UdpSocket {
fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() }
}

#[stable(feature = "from_raw_os", since = "1.1.0")]
impl FromRawFd for net::TcpStream {
unsafe fn from_raw_fd(fd: RawFd) -> net::TcpStream {
let socket = sys::net::Socket::from_inner(fd);
net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(socket))
}
}

#[stable(feature = "from_raw_os", since = "1.1.0")]
impl FromRawFd for net::TcpListener {
unsafe fn from_raw_fd(fd: RawFd) -> net::TcpListener {
let socket = sys::net::Socket::from_inner(fd);
net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(socket))
}
}

#[stable(feature = "from_raw_os", since = "1.1.0")]
impl FromRawFd for net::UdpSocket {
unsafe fn from_raw_fd(fd: RawFd) -> net::UdpSocket {
let socket = sys::net::Socket::from_inner(fd);
net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(socket))
}
}

#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawFd for net::TcpStream {
fn into_raw_fd(self) -> RawFd {
self.into_inner().into_socket().into_inner()
}
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawFd for net::TcpListener {
fn into_raw_fd(self) -> RawFd {
self.into_inner().into_socket().into_inner()
}
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawFd for net::UdpSocket {
fn into_raw_fd(self) -> RawFd {
self.into_inner().into_socket().into_inner()
}
}

/// A structure representing a Unix domain socket server.
///
/// # Examples
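The hunk above relocates the `AsRawFd`/`FromRawFd`/`IntoRawFd` impls for the standard TCP/UDP types into this module; the public traits themselves are unchanged and stable. A small Unix-only usage sketch of those traits (the address is arbitrary):

```rust
use std::net::TcpListener;
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd};

fn main() {
    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
    // Borrow the descriptor without giving up ownership.
    println!("listening on fd {}", listener.as_raw_fd());

    // Transfer ownership of the descriptor and rebuild the listener around it.
    // `from_raw_fd` is unsafe because the caller must guarantee the fd is valid
    // and not owned by anything else.
    let fd = listener.into_raw_fd();
    let listener = unsafe { TcpListener::from_raw_fd(fd) };
    drop(listener); // the socket is closed exactly once
}
```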
@ -1064,26 +1064,21 @@ impl MetaItem {
},
_ => return None,
};
let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi);
let node = match MetaItemKind::from_tokens(tokens) {
Some(node) => node,
_ => return None,
};
if let Some(last_span) = node.last_span() {
span.hi = last_span.hi;
}
span.hi = match node {
MetaItemKind::NameValue(ref lit) => lit.span.hi,
MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi),
_ => span.hi,
};
Some(MetaItem { name: name, span: span, node: node })
}
}

impl MetaItemKind {
fn last_span(&self) -> Option<Span> {
match *self {
MetaItemKind::Word => None,
MetaItemKind::List(ref list) => list.last().map(NestedMetaItem::span),
MetaItemKind::NameValue(ref lit) => Some(lit.span),
}
}

pub fn tokens(&self, span: Span) -> TokenStream {
match *self {
MetaItemKind::Word => TokenStream::empty(),

@ -1130,7 +1125,7 @@ impl MetaItemKind {
let mut result = Vec::new();
while let Some(..) = tokens.peek() {
match NestedMetaItemKind::from_tokens(&mut tokens) {
Some(item) => result.push(Spanned { span: item.span(), node: item }),
Some(item) => result.push(respan(item.span(), item)),
None => return None,
}
match tokens.next() {

@ -1163,7 +1158,7 @@ impl NestedMetaItemKind {
if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
if let Some(node) = LitKind::from_token(token) {
tokens.next();
return Some(NestedMetaItemKind::Literal(Spanned { node: node, span: span }));
return Some(NestedMetaItemKind::Literal(respan(span, node)));
}
}

@ -1256,7 +1251,7 @@ pub trait HasAttrs: Sized {
impl<T: HasAttrs> HasAttrs for Spanned<T> {
fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() }
fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self {
Spanned { node: self.node.map_attrs(f), span: self.span }
respan(self.span, self.node.map_attrs(f))
}
}
@ -244,10 +244,9 @@ impl<'a> Parser<'a> {
pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
} else if self.token == token::OpenDelim(token::Paren) {
} else if self.eat(&token::OpenDelim(token::Paren)) {
ast::MetaItemKind::List(self.parse_meta_seq()?)
} else {
self.eat(&token::OpenDelim(token::Paren));
ast::MetaItemKind::Word
})
}

@ -277,9 +276,8 @@ impl<'a> Parser<'a> {

/// matches meta_seq = ( COMMASEP(meta_item_inner) )
fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
self.parse_unspanned_seq(&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p: &mut Parser<'a>| p.parse_meta_item_inner())
self.parse_seq_to_end(&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p: &mut Parser<'a>| p.parse_meta_item_inner())
}
}
@ -323,8 +323,8 @@ impl NameAndSpan {
pub fn name(&self) -> Symbol {
match self.format {
ExpnFormat::MacroAttribute(s) |
ExpnFormat::MacroBang(s) |
ExpnFormat::CompilerDesugaring(s) => s,
ExpnFormat::MacroBang(s) => s,
ExpnFormat::CompilerDesugaring(ref kind) => kind.as_symbol(),
}
}
}

@ -337,7 +337,27 @@ pub enum ExpnFormat {
/// e.g. `format!()`
MacroBang(Symbol),
/// Desugaring done by the compiler during HIR lowering.
CompilerDesugaring(Symbol)
CompilerDesugaring(CompilerDesugaringKind)
}

/// The kind of compiler desugaring.
#[derive(Clone, Hash, Debug, PartialEq, Eq)]
pub enum CompilerDesugaringKind {
BackArrow,
DotFill,
QuestionMark,
}

impl CompilerDesugaringKind {
pub fn as_symbol(&self) -> Symbol {
use CompilerDesugaringKind::*;
let s = match *self {
BackArrow => "<-",
DotFill => "...",
QuestionMark => "?",
};
Symbol::intern(s)
}
}

impl Encodable for SyntaxContext {
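The new enum above replaces the free-form `Symbol` payload of `CompilerDesugaring` with a closed set of desugaring kinds. A rough standalone illustration of the same mapping (plain `&'static str` standing in for the interned `Symbol`, outside the compiler):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DesugaringKind { BackArrow, DotFill, QuestionMark }

fn as_str(kind: DesugaringKind) -> &'static str {
    // Same table as `CompilerDesugaringKind::as_symbol`, minus the interner.
    match kind {
        DesugaringKind::BackArrow => "<-",
        DesugaringKind::DotFill => "...",
        DesugaringKind::QuestionMark => "?",
    }
}

fn main() {
    assert_eq!(as_str(DesugaringKind::QuestionMark), "?");
}
```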
@ -47,7 +47,7 @@ extern crate serialize;
extern crate serialize as rustc_serialize; // used by deriving

pub mod hygiene;
pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan};
pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan, CompilerDesugaringKind};

pub mod symbol;

@ -153,6 +153,17 @@ impl Span {
}
}

/// Check if this span arises from a compiler desugaring of kind `kind`.
pub fn is_compiler_desugaring(&self, kind: CompilerDesugaringKind) -> bool {
match self.ctxt.outer().expn_info() {
Some(info) => match info.callee.format {
ExpnFormat::CompilerDesugaring(k) => k == kind,
_ => false,
},
None => false,
}
}

/// Check if a span is "internal" to a macro in which `unsafe`
/// can be used without triggering the `unsafe_code` lint
// (that is, a macro marked with `#[allow_internal_unsafe]`).
@ -6,7 +6,7 @@ error[E0425]: cannot find value `A` in module `namespaced_enums`
   |
help: possible candidate is found in another module, you can import it into scope
   |
12 | use namespaced_enums::Foo::A;
14 | use namespaced_enums::Foo::A;
   |

error[E0425]: cannot find function `B` in module `namespaced_enums`

@ -17,7 +17,7 @@ error[E0425]: cannot find function `B` in module `namespaced_enums`
   |
help: possible candidate is found in another module, you can import it into scope
   |
12 | use namespaced_enums::Foo::B;
14 | use namespaced_enums::Foo::B;
   |

error[E0422]: cannot find struct, variant or union type `C` in module `namespaced_enums`

@ -28,7 +28,7 @@ error[E0422]: cannot find struct, variant or union type `C` in module `namespaced_enums`
   |
help: possible candidate is found in another module, you can import it into scope
   |
12 | use namespaced_enums::Foo::C;
14 | use namespaced_enums::Foo::C;
   |

error: aborting due to 3 previous errors


@ -6,7 +6,7 @@ error[E0405]: cannot find trait `OuterTrait` in this scope
   |
help: possible candidate is found in another module, you can import it into scope
   |
16 | use issue_21221_3::outer::OuterTrait;
18 | use issue_21221_3::outer::OuterTrait;
   |

error: cannot continue compilation due to previous error


@ -6,7 +6,7 @@ error[E0405]: cannot find trait `T` in this scope
   |
help: possible candidate is found in another module, you can import it into scope
   |
16 | use issue_21221_4::T;
18 | use issue_21221_4::T;
   |

error: cannot continue compilation due to previous error


@ -6,7 +6,7 @@ error[E0404]: expected trait, found type alias `Foo`
   |
help: possible better candidate is found in another module, you can import it into scope
   |
12 | use issue_3907::Foo;
14 | use issue_3907::Foo;
   |

error: cannot continue compilation due to previous error


@ -10,7 +10,7 @@ error[E0423]: expected value, found struct `Z`
   |
help: possible better candidate is found in another module, you can import it into scope
   |
15 | use m::n::Z;
16 | use m::n::Z;
   |

error[E0423]: expected value, found struct `S`

@ -24,7 +24,7 @@ error[E0423]: expected value, found struct `S`
   |
help: possible better candidate is found in another module, you can import it into scope
   |
13 | use m::S;
15 | use m::S;
   |

error[E0423]: expected value, found struct `xcrate::S`

@ -38,7 +38,7 @@ error[E0423]: expected value, found struct `xcrate::S`
   |
help: possible better candidate is found in another module, you can import it into scope
   |
13 | use m::S;
15 | use m::S;
   |

error[E0603]: tuple struct `Z` is private
src/test/ui/resolve/use_suggestion_placement.rs (new file, 27 lines)

@ -0,0 +1,27 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

macro_rules! y {
    () => {}
}

mod m {
    pub const A: i32 = 0;
}

fn main() {
    y!();
    let _ = A;
    foo();
}

fn foo() {
    type Dict<K, V> = HashMap<K, V>;
}
src/test/ui/resolve/use_suggestion_placement.stderr (new file, 38 lines)

@ -0,0 +1,38 @@
error[E0425]: cannot find value `A` in this scope
  --> $DIR/use_suggestion_placement.rs:21:13
   |
21 |     let _ = A;
   |             ^ not found in this scope
   |
help: possible candidate is found in another module, you can import it into scope
   |
11 | use m::A;
   |

error[E0412]: cannot find type `HashMap` in this scope
  --> $DIR/use_suggestion_placement.rs:26:23
   |
26 |     type Dict<K, V> = HashMap<K, V>;
   |                       ^^^^^^^ not found in this scope
   |
help: possible candidates are found in other modules, you can import them into scope
   |
11 | use std::collections::HashMap;
   |
11 | use std::collections::hash_map::HashMap;
   |

error[E0091]: type parameter `K` is unused
  --> $DIR/use_suggestion_placement.rs:26:15
   |
26 |     type Dict<K, V> = HashMap<K, V>;
   |               ^ unused type parameter

error[E0091]: type parameter `V` is unused
  --> $DIR/use_suggestion_placement.rs:26:18
   |
26 |     type Dict<K, V> = HashMap<K, V>;
   |                  ^ unused type parameter

error: aborting due to 4 previous errors
@ -2,7 +2,7 @@ error[E0536]: expected 1 cfg-pattern
  --> $DIR/E0536.rs:11:7
   |
11 | #[cfg(not())] //~ ERROR E0536
   |       ^^^
   |       ^^^^^
   |
error: aborting due to previous error


@ -2,7 +2,7 @@ error[E0537]: invalid predicate `unknown`
  --> $DIR/E0537.rs:11:7
   |
11 | #[cfg(unknown())] //~ ERROR E0537
   |       ^^^^^^^
   |       ^^^^^^^^^
   |
error: aborting due to previous error


@ -1,11 +1,3 @@
error[E0577]: expected module, found struct `S`
  --> $DIR/visibility-ty-params.rs:16:5
   |
16 | m!{ S<u8> } //~ ERROR generic arguments in visibility path
   |     -^^^^
   |     |
   |     did you mean `m`?
   |
error: generic arguments in visibility path
  --> $DIR/visibility-ty-params.rs:16:6
   |

@ -18,5 +10,13 @@ error: generic arguments in visibility path
20 | m!{ m<> } //~ ERROR generic arguments in visibility path
   |      ^^
   |
error[E0577]: expected module, found struct `S`
  --> $DIR/visibility-ty-params.rs:16:5
   |
16 | m!{ S<u8> } //~ ERROR generic arguments in visibility path
   |     -^^^^
   |     |
   |     did you mean `m`?
   |
error: aborting due to 3 previous errors
Some files were not shown because too many files have changed in this diff.