rustc: doc comments

parent 0b7af2668a
commit c3e182cf43
343 changed files with 2260 additions and 2241 deletions
@@ -60,17 +60,17 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
  /// Run this rule for all hosts without cross compiling.
  const ONLY_HOSTS: bool = false;
- /// Primary function to execute this rule. Can call `builder.ensure(...)`
+ /// Primary function to execute this rule. Can call `builder.ensure()`
  /// with other steps to run those.
  fn run(self, builder: &Builder) -> Self::Output;
  /// When bootstrap is passed a set of paths, this controls whether this rule
  /// will execute. However, it does not get called in a "default" context
- /// when we are not passed any paths; in that case, make_run is called
+ /// when we are not passed any paths; in that case, `make_run` is called
  /// directly.
  fn should_run(run: ShouldRun) -> ShouldRun;
- /// Build up a "root" rule, either as a default rule or from a path passed
+ /// Builds up a "root" rule, either as a default rule or from a path passed
  /// to us.
  ///
  /// When path is `None`, we are executing in a context where no paths were
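The `Step` doc comments above describe a dependency-driven build rule: `run` does the work and may call `builder.ensure(...)` so other steps run first. A minimal, self-contained sketch of that pattern (a simplified model with invented signatures, not the actual `src/bootstrap` API):

```rust
use std::collections::HashSet;

// Simplified model: the real bootstrap `Step` trait also has `should_run`,
// `make_run`, `ONLY_HOSTS`, and an associated `Output` type.
trait Step {
    fn name(&self) -> &'static str;
    /// Does the work for this rule; may call `builder.ensure(...)` with
    /// other steps so they run (once) first.
    fn run(&self, builder: &mut Builder);
}

struct Std;
struct Rustc;

impl Step for Std {
    fn name(&self) -> &'static str { "std" }
    fn run(&self, _builder: &mut Builder) { println!("building std"); }
}

impl Step for Rustc {
    fn name(&self) -> &'static str { "rustc" }
    fn run(&self, builder: &mut Builder) {
        builder.ensure(Std); // depend on another step
        println!("building rustc");
    }
}

#[derive(Default)]
struct Builder {
    done: HashSet<&'static str>, // steps that already ran (memoized)
}

impl Builder {
    fn ensure<S: Step>(&mut self, step: S) {
        if self.done.insert(step.name()) {
            step.run(self);
        }
    }
}

fn main() {
    let mut builder = Builder::default();
    builder.ensure(Rustc); // runs std, then rustc
    builder.ensure(Rustc); // already done: no-op
}
```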
@@ -648,7 +648,7 @@ impl<'a> Builder<'a> {
  add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
  }
- /// Get a path to the compiler specified.
+ /// Gets a path to the compiler specified.
  pub fn rustc(&self, compiler: Compiler) -> PathBuf {
  if compiler.is_snapshot(self) {
  self.initial_rustc.clone()

@@ -659,7 +659,7 @@ impl<'a> Builder<'a> {
  }
  }
- /// Get the paths to all of the compiler's codegen backends.
+ /// Gets the paths to all of the compiler's codegen backends.
  fn codegen_backends(&self, compiler: Compiler) -> impl Iterator<Item = PathBuf> {
  fs::read_dir(self.sysroot_codegen_backends(compiler))
  .into_iter()

@@ -227,10 +227,10 @@ lazy_static! {
  pub static ref INTERNER: Interner = Interner::default();
  }
- /// This is essentially a HashMap which allows storing any type in its input and
+ /// This is essentially a `HashMap` which allows storing any type in its input and
  /// any type in its output. It is a write-once cache; values are never evicted,
  /// which means that references to the value can safely be returned from the
- /// get() method.
+ /// `get()` method.
  #[derive(Debug)]
  pub struct Cache(
  RefCell<HashMap<
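The `Cache` described above is a write-once map from heterogeneous keys to heterogeneous values. A rough, hedged sketch of the same idea using `std::any` (the real bootstrap cache is keyed by `Step` values and hands out references, which needs extra machinery not shown here):

```rust
use std::any::{Any, TypeId};
use std::cell::RefCell;
use std::collections::HashMap;

/// Write-once, type-keyed cache: values are inserted at most once per type
/// and never evicted.
#[derive(Default)]
struct Cache(RefCell<HashMap<TypeId, Box<dyn Any>>>);

impl Cache {
    fn put<T: Any>(&self, value: T) {
        let boxed: Box<dyn Any> = Box::new(value);
        self.0.borrow_mut().entry(TypeId::of::<T>()).or_insert(boxed);
    }

    fn get<T: Any + Clone>(&self) -> Option<T> {
        self.0
            .borrow()
            .get(&TypeId::of::<T>())
            .and_then(|v| v.downcast_ref::<T>())
            .cloned()
    }
}

fn main() {
    let cache = Cache::default();
    cache.put(42u32);
    cache.put(String::from("interned"));
    assert_eq!(cache.get::<u32>(), Some(42));
    assert_eq!(cache.get::<String>(), Some(String::from("interned")));
}
```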
@@ -66,7 +66,7 @@ impl Step for Rustc {
  });
  }
- /// Build the compiler.
+ /// Builds the compiler.
  ///
  /// This will build the compiler for a particular stage of the build using
  /// the `compiler` targeting the `target` architecture. The artifacts

@@ -3,7 +3,7 @@
  //! Responsible for cleaning out a build directory of all old and stale
  //! artifacts to prepare for a fresh build. Currently doesn't remove the
  //! `build/cache` directory (download cache) or the `build/$target/llvm`
- //! directory unless the --all flag is present.
+ //! directory unless the `--all` flag is present.
  use std::fs;
  use std::io::{self, ErrorKind};

@@ -48,7 +48,7 @@ impl Step for Std {
  });
  }
- /// Build the standard library.
+ /// Builds the standard library.
  ///
  /// This will build the standard library for a particular stage of the build
  /// using the `compiler` targeting the `target` architecture. The artifacts

@@ -269,7 +269,7 @@ impl Step for StartupObjects {
  });
  }
- /// Build and prepare startup objects like rsbegin.o and rsend.o
+ /// Builds and prepare startup objects like rsbegin.o and rsend.o
  ///
  /// These are primarily used on Windows right now for linking executables/dlls.
  /// They don't require any library support as they're just plain old object

@@ -334,7 +334,7 @@ impl Step for Test {
  });
  }
- /// Build libtest.
+ /// Builds libtest.
  ///
  /// This will build libtest and supporting libraries for a particular stage of
  /// the build using the `compiler` targeting the `target` architecture. The

@@ -455,7 +455,7 @@ impl Step for Rustc {
  });
  }
- /// Build the compiler.
+ /// Builds the compiler.
  ///
  /// This will build the compiler for a particular stage of the build using
  /// the `compiler` targeting the `target` architecture. The artifacts

@@ -342,7 +342,7 @@ impl Step for Mingw {
  run.builder.ensure(Mingw { host: run.target });
  }
- /// Build the `rust-mingw` installer component.
+ /// Builds the `rust-mingw` installer component.
  ///
  /// This contains all the bits and pieces to run the MinGW Windows targets
  /// without any extra installed software (e.g., we bundle gcc, libraries, etc).

@@ -259,7 +259,7 @@ impl Step for TheBook {
  });
  }
- /// Build the book and associated stuff.
+ /// Builds the book and associated stuff.
  ///
  /// We need to build:
  ///

@@ -611,7 +611,7 @@ impl Step for WhitelistedRustc {
  });
  }
- /// Generate whitelisted compiler crate documentation.
+ /// Generates whitelisted compiler crate documentation.
  ///
  /// This will generate all documentation for crates that are whitelisted
  /// to be included in the standard documentation. This documentation is

@@ -683,7 +683,7 @@ impl Step for Rustc {
  });
  }
- /// Generate compiler documentation.
+ /// Generates compiler documentation.
  ///
  /// This will generate all documentation for compiler and dependencies.
  /// Compiler documentation is distributed separately, so we make sure

@@ -784,7 +784,7 @@ impl Step for Rustdoc {
  });
  }
- /// Generate compiler documentation.
+ /// Generates compiler documentation.
  ///
  /// This will generate all documentation for compiler and dependencies.
  /// Compiler documentation is distributed separately, so we make sure
@@ -69,7 +69,7 @@
  //! ## Copying stage0 {std,test,rustc}
  //!
  //! This copies the build output from Cargo into
- //! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: This step's
+ //! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's
  //! documentation should be expanded -- the information already here may be
  //! incorrect.
  //!

@@ -504,7 +504,7 @@ impl Build {
  cleared
  }
- /// Get the space-separated set of activated features for the standard
+ /// Gets the space-separated set of activated features for the standard
  /// library.
  fn std_features(&self) -> String {
  let mut features = "panic-unwind".to_string();

@@ -521,7 +521,7 @@ impl Build {
  features
  }
- /// Get the space-separated set of activated features for the compiler.
+ /// Gets the space-separated set of activated features for the compiler.
  fn rustc_features(&self) -> String {
  let mut features = String::new();
  if self.config.jemalloc {

@@ -609,7 +609,7 @@ impl Build {
  self.out.join(&*target).join("crate-docs")
  }
- /// Returns true if no custom `llvm-config` is set for the specified target.
+ /// Returns `true` if no custom `llvm-config` is set for the specified target.
  ///
  /// If no custom `llvm-config` was specified then Rust's llvm will be used.
  fn is_rust_llvm(&self, target: Interned<String>) -> bool {

@@ -857,13 +857,13 @@ impl Build {
  .map(|p| &**p)
  }
- /// Returns true if this is a no-std `target`, if defined
+ /// Returns `true` if this is a no-std `target`, if defined
  fn no_std(&self, target: Interned<String>) -> Option<bool> {
  self.config.target_config.get(&target)
  .map(|t| t.no_std)
  }
- /// Returns whether the target will be tested using the `remote-test-client`
+ /// Returns `true` if the target will be tested using the `remote-test-client`
  /// and `remote-test-server` binaries.
  fn remote_tested(&self, target: Interned<String>) -> bool {
  self.qemu_rootfs(target).is_some() || target.contains("android") ||

@@ -1059,7 +1059,7 @@ impl Build {
  self.rust_info.version(self, channel::CFG_RELEASE_NUM)
  }
- /// Return the full commit hash
+ /// Returns the full commit hash.
  fn rust_sha(&self) -> Option<&str> {
  self.rust_info.sha()
  }

@@ -1079,7 +1079,7 @@ impl Build {
  panic!("failed to find version in {}'s Cargo.toml", package)
  }
- /// Returns whether unstable features should be enabled for the compiler
+ /// Returns `true` if unstable features should be enabled for the compiler
  /// we're building.
  fn unstable_features(&self) -> bool {
  match &self.config.channel[..] {

@@ -1327,7 +1327,7 @@ impl<'a> Compiler {
  self
  }
- /// Returns whether this is a snapshot compiler for `build`'s configuration
+ /// Returns `true` if this is a snapshot compiler for `build`'s configuration
  pub fn is_snapshot(&self, build: &Build) -> bool {
  self.stage == 0 && self.host == build.build
  }
@@ -30,9 +30,9 @@ const ADB_TEST_DIR: &str = "/data/tmp/work";
  /// The two modes of the test runner; tests or benchmarks.
  #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
  pub enum TestKind {
- /// Run `cargo test`
+ /// Run `cargo test`.
  Test,
- /// Run `cargo bench`
+ /// Run `cargo bench`.
  Bench,
  }

@@ -1288,7 +1288,7 @@ impl Step for DocTest {
  run.never()
  }
- /// Run `rustdoc --test` for all documentation in `src/doc`.
+ /// Runs `rustdoc --test` for all documentation in `src/doc`.
  ///
  /// This will run all tests in our markdown documentation (e.g., the book)
  /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to

@@ -1408,7 +1408,7 @@ impl Step for ErrorIndex {
  });
  }
- /// Run the error index generator tool to execute the tests located in the error
+ /// Runs the error index generator tool to execute the tests located in the error
  /// index.
  ///
  /// The `error_index_generator` tool lives in `src/tools` and is used to

@@ -1614,7 +1614,7 @@ impl Step for Crate {
  }
  }
- /// Run all unit tests plus documentation tests for a given crate defined
+ /// Runs all unit tests plus documentation tests for a given crate defined
  /// by a `Cargo.toml` (single manifest)
  ///
  /// This is what runs tests for crates like the standard library, compiler, etc.

@@ -1833,7 +1833,7 @@ fn envify(s: &str) -> String {
  /// the standard library and such to the emulator ahead of time. This step
  /// represents this and is a dependency of all test suites.
  ///
- /// Most of the time this is a noop. For some steps such as shipping data to
+ /// Most of the time this is a no-op. For some steps such as shipping data to
  /// QEMU we have to build our own tools so we've got conditional dependencies
  /// on those programs as well. Note that the remote test client is built for
  /// the build target (us) and the server is built for the target.

@@ -1904,7 +1904,7 @@ impl Step for Distcheck {
  run.builder.ensure(Distcheck);
  }
- /// Run "distcheck", a 'make check' from a tarball
+ /// Runs "distcheck", a 'make check' from a tarball
  fn run(self, builder: &Builder) {
  builder.info("Distcheck");
  let dir = builder.out.join("tmp").join("distcheck");

@@ -1965,7 +1965,7 @@ impl Step for Bootstrap {
  const DEFAULT: bool = true;
  const ONLY_HOSTS: bool = true;
- /// Test the build system itself
+ /// Tests the build system itself.
  fn run(self, builder: &Builder) {
  let mut cmd = Command::new(&builder.initial_cargo);
  cmd.arg("test")
@@ -40,7 +40,7 @@ impl Step for ToolBuild {
  run.never()
  }
- /// Build a tool in `src/tools`
+ /// Builds a tool in `src/tools`
  ///
  /// This will build the specified tool with the specified `host` compiler in
  /// `stage` into the normal cargo output directory.

@@ -621,7 +621,7 @@ tool_extended!((self, builder),
  );
  impl<'a> Builder<'a> {
- /// Get a `Command` which is ready to run `tool` in `stage` built for
+ /// Gets a `Command` which is ready to run `tool` in `stage` built for
  /// `host`.
  pub fn tool_cmd(&self, tool: Tool) -> Command {
  let mut cmd = Command::new(self.tool_exe(tool));

@@ -33,7 +33,7 @@ pub fn exe(name: &str, target: &str) -> String {
  }
  }
- /// Returns whether the file name given looks like a dynamic library.
+ /// Returns `true` if the file name given looks like a dynamic library.
  pub fn is_dylib(name: &str) -> bool {
  name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
  }

@@ -163,7 +163,7 @@ pub fn mtime(path: &Path) -> SystemTime {
  .unwrap_or(UNIX_EPOCH)
  }
- /// Returns whether `dst` is up to date given that the file or files in `src`
+ /// Returns `true` if `dst` is up to date given that the file or files in `src`
  /// are used to generate it.
  ///
  /// Uses last-modified time checks to verify this.
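The freshness check documented above compares last-modified times. A minimal sketch of that logic, assuming a single source file per output (the real build_helper helpers also walk directories and handle missing inputs):

```rust
use std::fs;
use std::path::Path;
use std::time::{SystemTime, UNIX_EPOCH};

/// Last-modified time of `path`, or the epoch if it cannot be read.
fn mtime(path: &Path) -> SystemTime {
    fs::metadata(path).and_then(|m| m.modified()).unwrap_or(UNIX_EPOCH)
}

/// `dst` is up to date if it exists and is at least as new as `src`.
fn up_to_date(src: &Path, dst: &Path) -> bool {
    dst.exists() && mtime(dst) >= mtime(src)
}

fn main() {
    println!("{}", up_to_date(Path::new("lib.rs"), Path::new("lib.o")));
}
```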
@@ -190,12 +190,12 @@ pub struct NativeLibBoilerplate {
  }
  impl NativeLibBoilerplate {
- /// On OSX we don't want to ship the exact filename that compiler-rt builds.
+ /// On macOS we don't want to ship the exact filename that compiler-rt builds.
  /// This conflicts with the system and ours is likely a wildly different
  /// version, so they can't be substituted.
  ///
  /// As a result, we rename it here but we need to also use
- /// `install_name_tool` on OSX to rename the commands listed inside of it to
+ /// `install_name_tool` on macOS to rename the commands listed inside of it to
  /// ensure it's linked against correctly.
  pub fn fixup_sanitizer_lib_name(&self, sanitizer_name: &str) {
  if env::var("TARGET").unwrap() != "x86_64-apple-darwin" {

@@ -1,6 +1,6 @@
- //! String manipulation
+ //! String manipulation.
  //!
- //! For more details, see std::str
+ //! For more details, see the `std::str` module.
  #![stable(feature = "rust1", since = "1.0.0")]

@@ -1,7 +1,7 @@
  //! The string Pattern API.
  //!
- //! For more details, see the traits `Pattern`, `Searcher`,
- //! `ReverseSearcher` and `DoubleEndedSearcher`.
+ //! For more details, see the traits [`Pattern`], [`Searcher`],
+ //! [`ReverseSearcher`], and [`DoubleEndedSearcher`].
  #![unstable(feature = "pattern",
  reason = "API not fully fleshed out and ready to be stabilized",

@@ -1,6 +1,6 @@
- //! Unwinding for wasm32
+ //! Unwinding for *wasm32* target.
  //!
- //! Right now we don't support this, so this is just stubs
+ //! Right now we don't support this, so this is just stubs.
  use alloc::boxed::Box;
  use core::any::Any;

@@ -6,7 +6,7 @@
  //! http://www.airs.com/blog/archives/464
  //!
  //! A reference implementation may be found in the GCC source tree
- //! (<root>/libgcc/unwind-c.c as of this writing)
+ //! (`<root>/libgcc/unwind-c.c` as of this writing).
  #![allow(non_upper_case_globals)]
  #![allow(unused)]

@@ -1,5 +1,5 @@
  //! Utilities for parsing DWARF-encoded data streams.
- //! See http://www.dwarfstd.org,
+ //! See <http://www.dwarfstd.org>,
  //! DWARF-4 standard, Section 7 - "Data Representation"
  // This module is used only by x86_64-pc-windows-gnu for now, but we

@@ -1,9 +1,9 @@
- //! Unwinding for emscripten
+ //! Unwinding for *emscripten* target.
  //!
  //! Whereas Rust's usual unwinding implementation for Unix platforms
- //! calls into the libunwind APIs directly, on emscripten we instead
+ //! calls into the libunwind APIs directly, on Emscripten we instead
  //! call into the C++ unwinding APIs. This is just an expedience since
- //! emscripten's runtime always implements those APIs and does not
+ //! Emscripten's runtime always implements those APIs and does not
  //! implement libunwind.
  #![allow(private_no_mangle_fns)]

@@ -1,4 +1,4 @@
- //! Implementation of panics backed by libgcc/libunwind (in some form)
+ //! Implementation of panics backed by libgcc/libunwind (in some form).
  //!
  //! For background on exception handling and stack unwinding please see
  //! "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and

@@ -22,7 +22,7 @@ impl DepNodeFilter {
  }
  }
- /// True if all nodes always pass the filter.
+ /// Returns `true` if all nodes always pass the filter.
  pub fn accepts_all(&self) -> bool {
  self.text.is_empty()
  }

@@ -302,7 +302,7 @@ macro_rules! define_dep_nodes {
  }
  }
- /// Create a new, parameterless DepNode. This method will assert
+ /// Creates a new, parameterless DepNode. This method will assert
  /// that the DepNode corresponding to the given DepKind actually
  /// does not require any parameters.
  #[inline(always)]

@@ -314,7 +314,7 @@ macro_rules! define_dep_nodes {
  }
  }
- /// Extract the DefId corresponding to this DepNode. This will work
+ /// Extracts the DefId corresponding to this DepNode. This will work
  /// if two conditions are met:
  ///
  /// 1. The Fingerprint of the DepNode actually is a DefPathHash, and

@@ -798,7 +798,7 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for HirId {
  }
  /// A "work product" corresponds to a `.o` (or other) file that we
- /// save in between runs. These ids do not have a DefId but rather
+ /// save in between runs. These IDs do not have a `DefId` but rather
  /// some independent path or string that persists between runs without
  /// the need to be mapped or unmapped. (This ensures we can serialize
  /// them even in the absence of a tcx.)

@@ -43,7 +43,7 @@ impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
  ///
  /// Here, `[op]` represents whatever nodes `op` reads in the
  /// course of execution; `Map(key)` represents the node for this
- /// map; and `CurrentTask` represents the current task when
+ /// map, and `CurrentTask` represents the current task when
  /// `memoize` is invoked.
  ///
  /// **Important:** when `op` is invoked, the current task will be

@@ -61,13 +61,13 @@ struct DepGraphData {
  colors: DepNodeColorMap,
- /// A set of loaded diagnostics which has been emitted.
+ /// A set of loaded diagnostics that have been emitted.
  emitted_diagnostics: Mutex<FxHashSet<DepNodeIndex>>,
  /// Used to wait for diagnostics to be emitted.
  emitted_diagnostics_cond_var: Condvar,
- /// When we load, there may be `.o` files, cached mir, or other such
+ /// When we load, there may be `.o` files, cached MIR, or other such
  /// things available to us. If we find that they are not dirty, we
  /// load the path to the file storing those work-products here into
  /// this map. We can later look for and extract that data.

@@ -115,7 +115,7 @@ impl DepGraph {
  }
  }
- /// True if we are actually building the full dep-graph.
+ /// Returns `true` if we are actually building the full dep-graph, and `false` otherwise.
  #[inline]
  pub fn is_fully_enabled(&self) -> bool {
  self.data.is_some()

@@ -320,8 +320,8 @@ impl DepGraph {
  }
  }
- /// Execute something within an "anonymous" task, that is, a task the
- /// DepNode of which is determined by the list of inputs it read from.
+ /// Executes something within an "anonymous" task, that is, a task the
+ /// `DepNode` of which is determined by the list of inputs it read from.
  pub fn with_anon_task<OP,R>(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex)
  where OP: FnOnce() -> R
  {

@@ -356,8 +356,8 @@ impl DepGraph {
  }
  }
- /// Execute something within an "eval-always" task which is a task
- // that runs whenever anything changes.
+ /// Executes something within an "eval-always" task which is a task
+ /// that runs whenever anything changes.
  pub fn with_eval_always_task<'a, C, A, R>(
  &self,
  key: DepNode,

@@ -438,7 +438,7 @@ impl DepGraph {
  self.data.as_ref().unwrap().previous.node_to_index(dep_node)
  }
- /// Check whether a previous work product exists for `v` and, if
+ /// Checks whether a previous work product exists for `v` and, if
  /// so, return the path that leads to it. Used to skip doing work.
  pub fn previous_work_product(&self, v: &WorkProductId) -> Option<WorkProduct> {
  self.data

@@ -589,7 +589,7 @@ impl DepGraph {
  }
  }
- /// Try to mark a dep-node which existed in the previous compilation session as green
+ /// Try to mark a dep-node which existed in the previous compilation session as green.
  fn try_mark_previous_green<'tcx>(
  &self,
  tcx: TyCtxt<'_, 'tcx, 'tcx>,

@@ -773,8 +773,8 @@ impl DepGraph {
  Some(dep_node_index)
  }
- /// Atomically emits some loaded diagnotics assuming that this only gets called with
- /// did_allocation set to true on one thread
+ /// Atomically emits some loaded diagnotics, assuming that this only gets called with
+ /// `did_allocation` set to `true` on a single thread.
  #[cold]
  #[inline(never)]
  fn emit_diagnostics<'tcx>(

@@ -913,7 +913,7 @@ impl DepGraph {
  #[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
  pub struct WorkProduct {
  pub cgu_name: String,
- /// Saved files associated with this CGU
+ /// Saved files associated with this CGU.
  pub saved_files: Vec<(WorkProductFileKind, String)>,
  }

@@ -937,17 +937,17 @@ pub(super) struct CurrentDepGraph {
  #[allow(dead_code)]
  forbidden_edge: Option<EdgeFilter>,
- // Anonymous DepNodes are nodes the ID of which we compute from the list of
- // their edges. This has the beneficial side-effect that multiple anonymous
- // nodes can be coalesced into one without changing the semantics of the
- // dependency graph. However, the merging of nodes can lead to a subtle
- // problem during red-green marking: The color of an anonymous node from
- // the current session might "shadow" the color of the node with the same
- // ID from the previous session. In order to side-step this problem, we make
- // sure that anon-node IDs allocated in different sessions don't overlap.
- // This is implemented by mixing a session-key into the ID fingerprint of
- // each anon node. The session-key is just a random number generated when
- // the DepGraph is created.
+ /// Anonymous `DepNode`s are nodes whose IDs we compute from the list of
+ /// their edges. This has the beneficial side-effect that multiple anonymous
+ /// nodes can be coalesced into one without changing the semantics of the
+ /// dependency graph. However, the merging of nodes can lead to a subtle
+ /// problem during red-green marking: The color of an anonymous node from
+ /// the current session might "shadow" the color of the node with the same
+ /// ID from the previous session. In order to side-step this problem, we make
+ /// sure that anonymous `NodeId`s allocated in different sessions don't overlap.
+ /// This is implemented by mixing a session-key into the ID fingerprint of
+ /// each anon node. The session-key is just a random number generated when
+ /// the `DepGraph` is created.
  anon_id_seed: Fingerprint,
  total_read_count: u64,
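The comment block above explains why a per-session key is mixed into anonymous node fingerprints. An illustrative sketch of that idea, using the standard hasher rather than rustc's `Fingerprint` type:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Computes an anonymous node ID from its edges, mixed with a per-session
/// seed so IDs from different sessions cannot collide and "shadow" each other.
fn anon_node_id(session_seed: u64, edges: &[u64]) -> u64 {
    let mut hasher = DefaultHasher::new();
    session_seed.hash(&mut hasher);
    edges.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let edges = [17, 42, 99];
    let a = anon_node_id(0xdead_beef, &edges);
    let b = anon_node_id(0x1234_5678, &edges);
    assert_ne!(a, b); // same edges, different sessions => different IDs
}
```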
@@ -91,7 +91,7 @@ struct CheckAttrVisitor<'a, 'tcx: 'a> {
  }
  impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
- /// Check any attribute.
+ /// Checks any attribute.
  fn check_attributes(&self, item: &hir::Item, target: Target) {
  if target == Target::Fn || target == Target::Const {
  self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.id));

@@ -115,7 +115,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
  self.check_used(item, target);
  }
- /// Check if an `#[inline]` is applied to a function or a closure.
+ /// Checks if an `#[inline]` is applied to a function or a closure.
  fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) {
  if target != Target::Fn && target != Target::Closure {
  struct_span_err!(self.tcx.sess,

@@ -127,7 +127,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
  }
  }
- /// Check if the `#[non_exhaustive]` attribute on an `item` is valid.
+ /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid.
  fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
  match target {
  Target::Struct | Target::Enum => { /* Valid */ },

@@ -143,7 +143,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
  }
  }
- /// Check if the `#[marker]` attribute on an `item` is valid.
+ /// Checks if the `#[marker]` attribute on an `item` is valid.
  fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
  match target {
  Target::Trait => { /* Valid */ },

@@ -157,7 +157,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
  }
  }
- /// Check if the `#[repr]` attributes on `item` are valid.
+ /// Checks if the `#[repr]` attributes on `item` are valid.
  fn check_repr(&self, item: &hir::Item, target: Target) {
  // Extract the names of all repr hints, e.g., [foo, bar, align] for:
  // ```

@@ -182,7 +182,7 @@ impl<T> ::std::ops::IndexMut<Namespace> for PerNS<T> {
  }
  impl<T> PerNS<Option<T>> {
- /// Returns whether all the items in this collection are `None`.
+ /// Returns `true` if all the items in this collection are `None`.
  pub fn is_empty(&self) -> bool {
  self.type_ns.is_none() && self.value_ns.is_none() && self.macro_ns.is_none()
  }

@@ -229,7 +229,7 @@ impl fmt::Debug for DefId {
  }
  impl DefId {
- /// Make a local `DefId` with the given index.
+ /// Makes a local `DefId` from the given `DefIndex`.
  #[inline]
  pub fn local(index: DefIndex) -> DefId {
  DefId { krate: LOCAL_CRATE, index: index }

@@ -4,7 +4,7 @@
  //! `super::itemlikevisit::ItemLikeVisitor` trait.**
  //!
  //! If you have decided to use this visitor, here are some general
- //! notes on how to do it:
+ //! notes on how to do so:
  //!
  //! Each overridden visit method has full control over what
  //! happens with its node, it can do its own traversal of the node's children,

@@ -86,7 +86,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> {
  /// using this setting.
  OnlyBodies(&'this Map<'tcx>),
- /// Visit all nested things, including item-likes.
+ /// Visits all nested things, including item-likes.
  ///
  /// **This is an unusual choice.** It is used when you want to
  /// process everything within their lexical context. Typically you

@@ -96,7 +96,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> {
  impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
  /// Returns the map to use for an "intra item-like" thing (if any).
- /// e.g., function body.
+ /// E.g., function body.
  pub fn intra(self) -> Option<&'this Map<'tcx>> {
  match self {
  NestedVisitorMap::None => None,

@@ -106,7 +106,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
  }
  /// Returns the map to use for an "item-like" thing (if any).
- /// e.g., item, impl-item.
+ /// E.g., item, impl-item.
  pub fn inter(self) -> Option<&'this Map<'tcx>> {
  match self {
  NestedVisitorMap::None => None,

@@ -203,7 +203,7 @@ pub trait Visitor<'v> : Sized {
  }
  }
- /// Visit the top-level item and (optionally) nested items / impl items. See
+ /// Visits the top-level item and (optionally) nested items / impl items. See
  /// `visit_nested_item` for details.
  fn visit_item(&mut self, i: &'v Item) {
  walk_item(self, i)

@@ -3,24 +3,24 @@
  //! Since the AST and HIR are fairly similar, this is mostly a simple procedure,
  //! much like a fold. Where lowering involves a bit more work things get more
  //! interesting and there are some invariants you should know about. These mostly
- //! concern spans and ids.
+ //! concern spans and IDs.
  //!
  //! Spans are assigned to AST nodes during parsing and then are modified during
  //! expansion to indicate the origin of a node and the process it went through
- //! being expanded. Ids are assigned to AST nodes just before lowering.
+ //! being expanded. IDs are assigned to AST nodes just before lowering.
  //!
- //! For the simpler lowering steps, ids and spans should be preserved. Unlike
+ //! For the simpler lowering steps, IDs and spans should be preserved. Unlike
  //! expansion we do not preserve the process of lowering in the spans, so spans
  //! should not be modified here. When creating a new node (as opposed to
- //! 'folding' an existing one), then you create a new id using `next_id()`.
+ //! 'folding' an existing one), then you create a new ID using `next_id()`.
  //!
- //! You must ensure that ids are unique. That means that you should only use the
- //! id from an AST node in a single HIR node (you can assume that AST node ids
- //! are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
- //! If you do, you must then set the new node's id to a fresh one.
+ //! You must ensure that IDs are unique. That means that you should only use the
+ //! ID from an AST node in a single HIR node (you can assume that AST node IDs
+ //! are unique). Every new node must have a unique ID. Avoid cloning HIR nodes.
+ //! If you do, you must then set the new node's ID to a fresh one.
  //!
  //! Spans are used for error messages and for tools to map semantics back to
- //! source code. It is therefore not as important with spans as ids to be strict
+ //! source code. It is therefore not as important with spans as IDs to be strict
  //! about use (you can't break the compiler by screwing up a span). Obviously, a
  //! HIR node can only have a single span. But multiple nodes can have the same
  //! span and spans don't need to be kept in order, etc. Where code is preserved

@@ -144,7 +144,7 @@ pub trait Resolver {
  is_value: bool,
  ) -> hir::Path;
- /// Obtain the resolution for a node-id.
+ /// Obtain the resolution for a `NodeId`.
  fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>;
  /// Obtain the possible resolutions for the given `use` statement.

@@ -3287,7 +3287,7 @@ impl<'a> LoweringContext<'a> {
  /// Paths like the visibility path in `pub(super) use foo::{bar, baz}` are repeated
  /// many times in the HIR tree; for each occurrence, we need to assign distinct
- /// node-ids. (See e.g., #56128.)
+ /// `NodeId`s. (See, e.g., #56128.)
  fn renumber_segment_ids(&mut self, path: &P<hir::Path>) -> P<hir::Path> {
  debug!("renumber_segment_ids(path = {:?})", path);
  let mut path = path.clone();

@@ -1,5 +1,5 @@
  //! This module provides a simplified abstraction for working with
- //! code blocks identified by their integer node-id. In particular,
+ //! code blocks identified by their integer `NodeId`. In particular,
  //! it captures a common set of attributes that all "function-like
  //! things" (represented by `FnLike` instances) share. For example,
  //! all `FnLike` instances have a type signature (be it explicit or

@@ -12,7 +12,7 @@ use syntax_pos::Span;
  use crate::hir::map::{ITEM_LIKE_SPACE, REGULAR_SPACE};
- /// Creates def ids for nodes in the AST.
+ /// Creates `DefId`s for nodes in the AST.
  pub struct DefCollector<'a> {
  definitions: &'a mut Definitions,
  parent_def: Option<DefIndex>,

@@ -1,5 +1,5 @@
  //! For each definition, we track the following data. A definition
- //! here is defined somewhat circularly as "something with a def-id",
+ //! here is defined somewhat circularly as "something with a `DefId`",
  //! but it generally corresponds to things like structs, enums, etc.
  //! There are also some rather random cases (like const initializer
  //! expressions) that are mostly just leftovers.

@@ -163,10 +163,10 @@ pub struct Definitions {
  /// any) with a `DisambiguatedDefPathData`.
  #[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
  pub struct DefKey {
- /// Parent path.
+ /// The parent path.
  pub parent: Option<DefIndex>,
- /// Identifier of this node.
+ /// The identifier of this node.
  pub disambiguated_data: DisambiguatedDefPathData,
  }

@@ -207,12 +207,12 @@ impl DefKey {
  }
  }
- /// Pair of `DefPathData` and an integer disambiguator. The integer is
+ /// A pair of `DefPathData` and an integer disambiguator. The integer is
  /// normally 0, but in the event that there are multiple defs with the
  /// same `parent` and `data`, we use this field to disambiguate
  /// between them. This introduces some artificial ordering dependency
  /// but means that if you have (e.g.) two impls for the same type in
- /// the same module, they do get distinct def-ids.
+ /// the same module, they do get distinct `DefId`s.
  #[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
  pub struct DisambiguatedDefPathData {
  pub data: DefPathData,
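As a worked illustration of the disambiguator described above (hypothetical, simplified types; rustc's `DefKey`/`DefPathData` carry much more information), two definitions that share a parent and path data are told apart only by the counter:

```rust
use std::collections::HashMap;

fn main() {
    // Next disambiguator to hand out for each (parent, data) pair.
    let mut next: HashMap<(u32, &str), u32> = HashMap::new();
    let defs = [(0u32, "impl Foo"), (0u32, "impl Foo"), (0u32, "fn bar")];

    for (parent, data) in defs {
        let d = next.entry((parent, data)).or_insert(0);
        println!("{data} in parent {parent} -> disambiguator {d}");
        *d += 1;
    }
    // The two `impl Foo` entries get disambiguators 0 and 1, so they still
    // map to distinct definitions; `fn bar` gets 0.
}
```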
@@ -221,10 +221,10 @@ pub struct DisambiguatedDefPathData {
  #[derive(Clone, Debug, Hash, RustcEncodable, RustcDecodable)]
  pub struct DefPath {
- /// the path leading from the crate root to the item
+ /// The path leading from the crate root to the item.
  pub data: Vec<DisambiguatedDefPathData>,
- /// what krate root is this path relative to?
+ /// The crate root this path is relative to.
  pub krate: CrateNum,
  }

@@ -260,9 +260,9 @@ impl DefPath {
  DefPath { data: data, krate: krate }
  }
- /// Returns a string representation of the DefPath without
+ /// Returns a string representation of the `DefPath` without
  /// the crate-prefix. This method is useful if you don't have
- /// a TyCtxt available.
+ /// a `TyCtxt` available.
  pub fn to_string_no_crate(&self) -> String {
  let mut s = String::with_capacity(self.data.len() * 16);

@@ -277,7 +277,7 @@ impl DefPath {
  s
  }
- /// Return filename friendly string of the DefPah with the
+ /// Returns a filename-friendly string for the `DefPath`, with the
  /// crate-prefix.
  pub fn to_string_friendly<F>(&self, crate_imported_name: F) -> String
  where F: FnOnce(CrateNum) -> Symbol

@@ -302,9 +302,9 @@ impl DefPath {
  s
  }
- /// Return filename friendly string of the DefPah without
+ /// Returns a filename-friendly string of the `DefPath`, without
  /// the crate-prefix. This method is useful if you don't have
- /// a TyCtxt available.
+ /// a `TyCtxt` available.
  pub fn to_filename_friendly_no_crate(&self) -> String {
  let mut s = String::with_capacity(self.data.len() * 16);

@@ -394,18 +394,18 @@ impl Borrow<Fingerprint> for DefPathHash {
  }
  impl Definitions {
- /// Create new empty definition map.
+ /// Creates new empty definition map.
  ///
- /// The DefIndex returned from a new Definitions are as follows:
- /// 1. At DefIndexAddressSpace::Low,
+ /// The `DefIndex` returned from a new `Definitions` are as follows:
+ /// 1. At `DefIndexAddressSpace::Low`,
  /// CRATE_ROOT has index 0:0, and then new indexes are allocated in
  /// ascending order.
- /// 2. At DefIndexAddressSpace::High,
- /// the first FIRST_FREE_HIGH_DEF_INDEX indexes are reserved for
- /// internal use, then 1:FIRST_FREE_HIGH_DEF_INDEX are allocated in
+ /// 2. At `DefIndexAddressSpace::High`,
+ /// the first `FIRST_FREE_HIGH_DEF_INDEX` indexes are reserved for
+ /// internal use, then `1:FIRST_FREE_HIGH_DEF_INDEX` are allocated in
  /// ascending order.
- ///
- /// FIXME: there is probably a better place to put this comment.
+ //
+ // FIXME: there is probably a better place to put this comment.
  pub fn new() -> Self {
  Self::default()
  }

@@ -414,7 +414,7 @@ impl Definitions {
  &self.table
  }
- /// Get the number of definitions.
+ /// Gets the number of definitions.
  pub fn def_index_counts_lo_hi(&self) -> (usize, usize) {
  (self.table.index_to_key[DefIndexAddressSpace::Low.index()].len(),
  self.table.index_to_key[DefIndexAddressSpace::High.index()].len())

@@ -497,8 +497,8 @@ impl Definitions {
  self.node_to_hir_id[node_id]
  }
- /// Retrieve the span of the given `DefId` if `DefId` is in the local crate, the span exists and
- /// it's not DUMMY_SP
+ /// Retrieves the span of the given `DefId` if `DefId` is in the local crate, the span exists
+ /// and it's not `DUMMY_SP`.
  #[inline]
  pub fn opt_span(&self, def_id: DefId) -> Option<Span> {
  if def_id.krate == LOCAL_CRATE {

@@ -508,7 +508,7 @@ impl Definitions {
  }
  }
- /// Add a definition with a parent definition.
+ /// Adds a root definition (no parent).
  pub fn create_root_def(&mut self,
  crate_name: &str,
  crate_disambiguator: CrateDisambiguator)

@@ -606,7 +606,7 @@ impl Definitions {
  index
  }
- /// Initialize the ast::NodeId to HirId mapping once it has been generated during
+ /// Initialize the `ast::NodeId` to `HirId` mapping once it has been generated during
  /// AST to HIR lowering.
  pub fn init_node_id_to_hir_id_mapping(&mut self,
  mapping: IndexVec<ast::NodeId, hir::HirId>) {

@@ -36,7 +36,7 @@ mod hir_id_validator;
  pub const ITEM_LIKE_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::Low;
  pub const REGULAR_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::High;
- /// Represents an entry and its parent NodeId.
+ /// Represents an entry and its parent `NodeId`.
  #[derive(Copy, Clone, Debug)]
  pub struct Entry<'hir> {
  parent: NodeId,

@@ -162,8 +162,7 @@ impl Forest {
  }
  }
- /// Represents a mapping from Node IDs to AST elements and their parent
- /// Node IDs
+ /// Represents a mapping from `NodeId`s to AST elements and their parent `NodeId`s.
  #[derive(Clone)]
  pub struct Map<'hir> {
  /// The backing storage for all the AST nodes.

@@ -473,7 +472,7 @@ impl<'hir> Map<'hir> {
  self.local_def_id(self.body_owner(id))
  }
- /// Given a node id, returns the `BodyId` associated with it,
+ /// Given a `NodeId`, returns the `BodyId` associated with it,
  /// if the node is a body owner, otherwise returns `None`.
  pub fn maybe_body_owned_by(&self, id: NodeId) -> Option<BodyId> {
  if let Some(entry) = self.find_entry(id) {

@@ -558,7 +557,7 @@ impl<'hir> Map<'hir> {
  self.trait_auto_impl(trait_did).is_some()
  }
- /// Get the attributes on the krate. This is preferable to
+ /// Gets the attributes on the crate. This is preferable to
  /// invoking `krate.attrs` because it registers a tighter
  /// dep-graph access.
  pub fn krate_attrs(&self) -> &'hir [ast::Attribute] {

@@ -653,8 +652,7 @@ impl<'hir> Map<'hir> {
  self.get_generics(id).map(|generics| generics.span).filter(|sp| *sp != DUMMY_SP)
  }
- /// Retrieve the Node corresponding to `id`, returning None if
- /// cannot be found.
+ /// Retrieves the `Node` corresponding to `id`, returning `None` if cannot be found.
  pub fn find(&self, id: NodeId) -> Option<Node<'hir>> {
  let result = self.find_entry(id).and_then(|entry| {
  if let Node::Crate = entry.node {

@@ -683,8 +681,8 @@ impl<'hir> Map<'hir> {
  /// returns the enclosing item. Note that this might not be the actual parent
  /// node in the AST - some kinds of nodes are not in the map and these will
  /// never appear as the parent_node. So you can always walk the `parent_nodes`
- /// from a node to the root of the ast (unless you get the same id back here
- /// that can happen if the id is not in the map itself or is just weird).
+ /// from a node to the root of the ast (unless you get the same ID back here
+ /// that can happen if the ID is not in the map itself or is just weird).
  pub fn get_parent_node(&self, id: NodeId) -> NodeId {
  if self.dep_graph.is_fully_enabled() {
  let hir_id_owner = self.node_to_hir_id(id).owner;

@@ -725,7 +723,7 @@ impl<'hir> Map<'hir> {
  /// If there is some error when walking the parents (e.g., a node does not
  /// have a parent in the map or a node can't be found), then we return the
- /// last good node id we found. Note that reaching the crate root (`id == 0`),
+ /// last good `NodeId` we found. Note that reaching the crate root (`id == 0`),
  /// is not an error, since items in the crate module have the crate root as
  /// parent.
  fn walk_parent_nodes<F, F2>(&self,

@@ -761,7 +759,7 @@ impl<'hir> Map<'hir> {
  }
  }
- /// Retrieve the `NodeId` for `id`'s enclosing method, unless there's a
+ /// Retrieves the `NodeId` for `id`'s enclosing method, unless there's a
  /// `while` or `loop` before reaching it, as block tail returns are not
  /// available in them.
  ///

@@ -809,7 +807,7 @@ impl<'hir> Map<'hir> {
  self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok()
  }
- /// Retrieve the `NodeId` for `id`'s parent item, or `id` itself if no
+ /// Retrieves the `NodeId` for `id`'s parent item, or `id` itself if no
  /// parent item is in this map. The "parent item" is the closest parent node
  /// in the HIR which is recorded by the map and is an item, either an item
  /// in a module, trait, or impl.

@@ -1122,7 +1120,7 @@ pub struct NodesMatchingSuffix<'a, 'hir:'a> {
  }
  impl<'a, 'hir> NodesMatchingSuffix<'a, 'hir> {
- /// Returns true only if some suffix of the module path for parent
+ /// Returns `true` only if some suffix of the module path for parent
  /// matches `self.in_which`.
  ///
  /// In other words: let `[x_0,x_1,...,x_k]` be `self.in_which`;

@@ -62,14 +62,14 @@ pub mod map;
  pub mod pat_util;
  pub mod print;
- /// A HirId uniquely identifies a node in the HIR of the current crate. It is
- /// composed of the `owner`, which is the DefIndex of the directly enclosing
- /// hir::Item, hir::TraitItem, or hir::ImplItem (i.e., the closest "item-like"),
+ /// Uniquely identifies a node in the HIR of the current crate. It is
+ /// composed of the `owner`, which is the `DefIndex` of the directly enclosing
+ /// `hir::Item`, `hir::TraitItem`, or `hir::ImplItem` (i.e., the closest "item-like"),
  /// and the `local_id` which is unique within the given owner.
  ///
  /// This two-level structure makes for more stable values: One can move an item
  /// around within the source code, or add or remove stuff before it, without
- /// the local_id part of the HirId changing, which is a very useful property in
+ /// the `local_id` part of the `HirId` changing, which is a very useful property in
  /// incremental compilation where we have to persist things through changes to
  /// the code base.
  #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
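A tiny illustration (with stand-in types, not the real rustc definitions) of the two-level ID scheme described above: the owner part pins the enclosing item, and only `local_id` varies within it.

```rust
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct DefIndex(u32);

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct HirId {
    owner: DefIndex, // the directly enclosing item-like
    local_id: u32,   // unique only within that owner
}

fn main() {
    let a = HirId { owner: DefIndex(7), local_id: 0 };
    let b = HirId { owner: DefIndex(7), local_id: 1 };
    // Moving the whole item around changes neither node's `local_id`;
    // the two nodes stay distinct via `local_id` alone.
    assert_eq!(a.owner, b.owner);
    assert_ne!(a, b);
}
```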
@ -130,7 +130,7 @@ mod item_local_id_inner {
|
|||
|
||||
pub use self::item_local_id_inner::ItemLocalId;
|
||||
|
||||
/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX
|
||||
/// The `HirId` corresponding to `CRATE_NODE_ID` and `CRATE_DEF_INDEX`.
|
||||
pub const CRATE_HIR_ID: HirId = HirId {
|
||||
owner: CRATE_DEF_INDEX,
|
||||
local_id: ItemLocalId::from_u32_const(0)
|
||||
|
@ -149,8 +149,8 @@ pub struct Lifetime {
|
|||
pub hir_id: HirId,
|
||||
pub span: Span,
|
||||
|
||||
/// Either "'a", referring to a named lifetime definition,
|
||||
/// or "" (aka keywords::Invalid), for elision placeholders.
|
||||
/// Either "`'a`", referring to a named lifetime definition,
|
||||
/// or "``" (i.e., `keywords::Invalid`), for elision placeholders.
|
||||
///
|
||||
/// HIR lowering inserts these placeholders in type paths that
|
||||
/// refer to type definitions needing lifetime parameters,
|
||||
|
@ -163,8 +163,9 @@ pub enum ParamName {
|
|||
/// Some user-given name like `T` or `'x`.
|
||||
Plain(Ident),
|
||||
|
||||
/// Synthetic name generated when user elided a lifetime in an impl header,
|
||||
/// e.g., the lifetimes in cases like these:
|
||||
/// Synthetic name generated when user elided a lifetime in an impl header.
|
||||
///
|
||||
/// E.g., the lifetimes in cases like these:
|
||||
///
|
||||
/// impl Foo for &u32
|
||||
/// impl Foo<'_> for u32
|
||||
|
@ -180,7 +181,7 @@ pub enum ParamName {
|
|||
|
||||
/// Indicates an illegal name was given and an error has been
|
||||
/// repored (so we should squelch other derived errors). Occurs
|
||||
/// when e.g., `'_` is used in the wrong place.
|
||||
/// when, e.g., `'_` is used in the wrong place.
|
||||
Error,
|
||||
}
|
||||
|
||||
|
@ -205,17 +206,17 @@ pub enum LifetimeName {
|
|||
/// User-given names or fresh (synthetic) names.
|
||||
Param(ParamName),
|
||||
|
||||
/// User typed nothing. e.g., the lifetime in `&u32`.
|
||||
/// User wrote nothing (e.g., the lifetime in `&u32`).
|
||||
Implicit,
|
||||
|
||||
/// Indicates an error during lowering (usually `'_` in wrong place)
|
||||
/// that was already reported.
|
||||
Error,
|
||||
|
||||
/// User typed `'_`.
|
||||
/// User wrote specifies `'_`.
|
||||
Underscore,
|
||||
|
||||
/// User wrote `'static`
|
||||
/// User wrote `'static`.
|
||||
Static,
|
||||
}
|
||||
|
||||
|
@ -280,7 +281,7 @@ impl Lifetime {
|
|||
}
|
||||
}
|
||||
|
||||
/// A "Path" is essentially Rust's notion of a name; for instance:
|
||||
/// A `Path` is essentially Rust's notion of a name; for instance,
|
||||
/// `std::cmp::PartialEq`. It's represented as a sequence of identifiers,
|
||||
/// along with a bunch of supporting information.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable)]
|
||||
|
@ -340,7 +341,7 @@ pub struct PathSegment {
|
|||
}
|
||||
|
||||
impl PathSegment {
|
||||
/// Convert an identifier to the corresponding segment.
|
||||
/// Converts an identifier to the corresponding segment.
|
||||
pub fn from_ident(ident: Ident) -> PathSegment {
|
||||
PathSegment {
|
||||
ident,
|
||||
|
@ -597,14 +598,14 @@ impl Generics {
|
|||
}
|
||||
}
|
||||
|
||||
/// Synthetic Type Parameters are converted to an other form during lowering, this allows
|
||||
/// to track the original form they had. Useful for error messages.
|
||||
/// Synthetic type parameters are converted to another form during lowering; this allows
|
||||
/// us to track the original form they had, and is useful for error messages.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||
pub enum SyntheticTyParamKind {
|
||||
ImplTrait
|
||||
}
|
||||
|
||||
/// A `where` clause in a definition
|
||||
/// A where-clause in a definition.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct WhereClause {
|
||||
pub id: NodeId,
|
||||
|
@ -624,7 +625,7 @@ impl WhereClause {
|
|||
}
|
||||
}
|
||||
|
||||
/// A single predicate in a `where` clause
|
||||
/// A single predicate in a where-clause.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub enum WherePredicate {
|
||||
/// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`).
|
||||
|
@ -645,19 +646,19 @@ impl WherePredicate {
|
|||
}
|
||||
}
|
||||
|
||||
/// A type bound, eg `for<'c> Foo: Send+Clone+'c`
|
||||
/// A type bound (e.g., `for<'c> Foo: Send + Clone + 'c`).
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct WhereBoundPredicate {
|
||||
pub span: Span,
|
||||
/// Any generics from a `for` binding
|
||||
/// Any generics from a `for` binding.
|
||||
pub bound_generic_params: HirVec<GenericParam>,
|
||||
/// The type being bounded
|
||||
/// The type being bounded.
|
||||
pub bounded_ty: P<Ty>,
|
||||
/// Trait and lifetime bounds (`Clone+Send+'static`)
|
||||
/// Trait and lifetime bounds (e.g., `Clone + Send + 'static`).
|
||||
pub bounds: GenericBounds,
|
||||
}
|
||||
|
||||
/// A lifetime predicate, e.g., `'a: 'b+'c`
|
||||
/// A lifetime predicate (e.g., `'a: 'b + 'c`).
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct WhereRegionPredicate {
|
||||
pub span: Span,
|
||||
|
@ -665,7 +666,7 @@ pub struct WhereRegionPredicate {
|
|||
pub bounds: GenericBounds,
|
||||
}
|
||||
|
||||
/// An equality predicate (unsupported), e.g., `T=int`
|
||||
/// An equality predicate (e.g., `T = int`); currently unsupported.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct WhereEqPredicate {
|
||||
pub id: NodeId,
|
||||
|
@ -759,7 +760,7 @@ impl Crate {
|
|||
}
|
||||
}
|
||||
|
||||
/// A parallel version of visit_all_item_likes
|
||||
/// A parallel version of `visit_all_item_likes`.
|
||||
pub fn par_visit_all_item_likes<'hir, V>(&'hir self, visitor: &V)
|
||||
where V: itemlikevisit::ParItemLikeVisitor<'hir> + Sync + Send
|
||||
{
|
||||
|
@ -800,14 +801,14 @@ pub struct MacroDef {
|
|||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct Block {
|
||||
/// Statements in a block
|
||||
/// Statements in a block.
|
||||
pub stmts: HirVec<Stmt>,
|
||||
/// An expression at the end of the block
|
||||
/// without a semicolon, if any
|
||||
/// without a semicolon, if any.
|
||||
pub expr: Option<P<Expr>>,
|
||||
pub id: NodeId,
|
||||
pub hir_id: HirId,
|
||||
/// Distinguishes between `unsafe { ... }` and `{ ... }`
|
||||
/// Distinguishes between `unsafe { ... }` and `{ ... }`.
|
||||
pub rules: BlockCheckMode,
|
||||
pub span: Span,
|
||||
/// If true, then there may exist `break 'a` values that aim to
|
||||
|
@ -874,18 +875,18 @@ impl Pat {
|
|||
}
|
||||
}
|
||||
|
||||
/// A single field in a struct pattern
|
||||
/// A single field in a struct pattern.
|
||||
///
|
||||
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }`
|
||||
/// are treated the same as` x: x, y: ref y, z: ref mut z`,
|
||||
/// except is_shorthand is true
|
||||
/// except `is_shorthand` is true.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct FieldPat {
|
||||
pub id: NodeId,
|
||||
pub hir_id: HirId,
|
||||
/// The identifier for the field
|
||||
/// The identifier for the field.
|
||||
pub ident: Ident,
|
||||
/// The pattern the field is destructured to
|
||||
/// The pattern the field is destructured to.
|
||||
pub pat: P<Pat>,
|
||||
pub is_shorthand: bool,
|
||||
}
|
||||
|
@ -922,41 +923,41 @@ pub enum RangeEnd {
|
|||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub enum PatKind {
|
||||
/// Represents a wildcard pattern (`_`)
|
||||
/// Represents a wildcard pattern (i.e., `_`).
|
||||
Wild,
|
||||
|
||||
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
|
||||
/// The `NodeId` is the canonical ID for the variable being bound,
|
||||
/// e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID,
|
||||
/// (e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID),
|
||||
/// which is the pattern ID of the first `x`.
|
||||
Binding(BindingAnnotation, NodeId, HirId, Ident, Option<P<Pat>>),
|
||||
|
||||
/// A struct or struct variant pattern, e.g., `Variant {x, y, ..}`.
|
||||
/// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`).
|
||||
/// The `bool` is `true` in the presence of a `..`.
|
||||
Struct(QPath, HirVec<Spanned<FieldPat>>, bool),
|
||||
|
||||
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
|
||||
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
|
||||
/// 0 <= position <= subpats.len()
|
||||
/// `0 <= position <= subpats.len()`
|
||||
TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>),
|
||||
|
||||
/// A path pattern for an unit struct/variant or a (maybe-associated) constant.
|
||||
Path(QPath),
|
||||
|
||||
/// A tuple pattern `(a, b)`.
|
||||
/// A tuple pattern (e.g., `(a, b)`).
|
||||
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
|
||||
/// 0 <= position <= subpats.len()
|
||||
/// `0 <= position <= subpats.len()`
|
||||
Tuple(HirVec<P<Pat>>, Option<usize>),
|
||||
/// A `box` pattern
|
||||
/// A `box` pattern.
|
||||
Box(P<Pat>),
|
||||
/// A reference pattern, e.g., `&mut (a, b)`
|
||||
/// A reference pattern (e.g., `&mut (a, b)`).
|
||||
Ref(P<Pat>, Mutability),
|
||||
/// A literal
|
||||
/// A literal.
|
||||
Lit(P<Expr>),
|
||||
/// A range pattern, e.g., `1...2` or `1..2`
|
||||
/// A range pattern (e.g., `1...2` or `1..2`).
|
||||
Range(P<Expr>, P<Expr>, RangeEnd),
|
||||
/// `[a, b, ..i, y, z]` is represented as:
|
||||
/// `PatKind::Slice(box [a, b], Some(i), box [y, z])`
|
||||
/// `PatKind::Slice(box [a, b], Some(i), box [y, z])`.
|
||||
Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>),
|
||||
}
|
||||
|
||||
|
@ -967,7 +968,7 @@ pub enum Mutability {
|
|||
}
|
||||
|
||||
impl Mutability {
|
||||
/// Return MutMutable only if both arguments are mutable.
|
||||
/// Returns `MutMutable` only if both arguments are mutable.
|
||||
pub fn and(self, other: Self) -> Self {
|
||||
match self {
|
||||
MutMutable => other,
|
||||
|
@ -978,41 +979,41 @@ impl Mutability {
|
|||
|
||||
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)]
|
||||
pub enum BinOpKind {
|
||||
/// The `+` operator (addition)
|
||||
/// The `+` operator (addition).
|
||||
Add,
|
||||
/// The `-` operator (subtraction)
|
||||
/// The `-` operator (subtraction).
|
||||
Sub,
|
||||
/// The `*` operator (multiplication)
|
||||
/// The `*` operator (multiplication).
|
||||
Mul,
|
||||
/// The `/` operator (division)
|
||||
/// The `/` operator (division).
|
||||
Div,
|
||||
/// The `%` operator (modulus)
|
||||
/// The `%` operator (modulus).
|
||||
Rem,
|
||||
/// The `&&` operator (logical and)
|
||||
/// The `&&` operator (logical and).
|
||||
And,
|
||||
/// The `||` operator (logical or)
|
||||
/// The `||` operator (logical or).
|
||||
Or,
|
||||
/// The `^` operator (bitwise xor)
|
||||
/// The `^` operator (bitwise xor).
|
||||
BitXor,
|
||||
/// The `&` operator (bitwise and)
|
||||
/// The `&` operator (bitwise and).
|
||||
BitAnd,
|
||||
/// The `|` operator (bitwise or)
|
||||
/// The `|` operator (bitwise or).
|
||||
BitOr,
|
||||
/// The `<<` operator (shift left)
|
||||
/// The `<<` operator (shift left).
|
||||
Shl,
|
||||
/// The `>>` operator (shift right)
|
||||
/// The `>>` operator (shift right).
|
||||
Shr,
|
||||
/// The `==` operator (equality)
|
||||
/// The `==` operator (equality).
|
||||
Eq,
|
||||
/// The `<` operator (less than)
|
||||
/// The `<` operator (less than).
|
||||
Lt,
|
||||
/// The `<=` operator (less than or equal to)
|
||||
/// The `<=` operator (less than or equal to).
|
||||
Le,
|
||||
/// The `!=` operator (not equal to)
|
||||
/// The `!=` operator (not equal to).
|
||||
Ne,
|
||||
/// The `>=` operator (greater than or equal to)
|
||||
/// The `>=` operator (greater than or equal to).
|
||||
Ge,
|
||||
/// The `>` operator (greater than)
|
||||
/// The `>` operator (greater than).
|
||||
Gt,
|
||||
}
|
||||
|
||||
|
@ -1077,7 +1078,7 @@ impl BinOpKind {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the binary operator takes its arguments by value
|
||||
/// Returns `true` if the binary operator takes its arguments by value.
|
||||
pub fn is_by_value(self) -> bool {
|
||||
!self.is_comparison()
|
||||
}
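
To make the by-value / by-reference distinction above concrete, here is a small illustrative example in ordinary Rust (not taken from the commit): comparison operators borrow their operands, while the other binary operators take `self` by value.

```rust
fn demo() {
    let a = String::from("a");
    let b = String::from("b");
    // Comparison operators take their operands by reference:
    // `a < b` is `PartialOrd::lt(&a, &b)`, so `a` and `b` remain usable.
    let _ordered = a < b;
    // Non-comparison operators take `self` by value:
    // `a + &b` is `Add::add(a, &b)`, which moves `a`.
    let c = a + &b;
    println!("{}", c);
}

fn main() {
    demo();
}
```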
|
||||
|
@ -1112,11 +1113,11 @@ pub type BinOp = Spanned<BinOpKind>;
|
|||
|
||||
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)]
|
||||
pub enum UnOp {
|
||||
/// The `*` operator for dereferencing
|
||||
/// The `*` operator (dereferencing).
|
||||
UnDeref,
|
||||
/// The `!` operator for logical inversion
|
||||
/// The `!` operator (logical negation).
|
||||
UnNot,
|
||||
/// The `-` operator for negation
|
||||
/// The `-` operator (negation).
|
||||
UnNeg,
|
||||
}
|
||||
|
||||
|
@ -1129,7 +1130,7 @@ impl UnOp {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the unary operator takes its argument by value
|
||||
/// Returns `true` if the unary operator takes its argument by value.
|
||||
pub fn is_by_value(self) -> bool {
|
||||
match self {
|
||||
UnNeg | UnNot => true,
|
||||
|
@ -1138,7 +1139,7 @@ impl UnOp {
|
|||
}
|
||||
}
|
||||
|
||||
/// A statement
|
||||
/// A statement.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable)]
|
||||
pub struct Stmt {
|
||||
pub id: NodeId,
|
||||
|
@ -1156,15 +1157,15 @@ impl fmt::Debug for Stmt {
|
|||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable)]
|
||||
pub enum StmtKind {
|
||||
/// A local (let) binding:
|
||||
/// A local (`let`) binding.
|
||||
Local(P<Local>),
|
||||
/// An item binding:
|
||||
/// An item binding.
|
||||
Item(P<ItemId>),
|
||||
|
||||
/// Expr without trailing semi-colon (must have unit type):
|
||||
/// An expression without a trailing semi-colon (must have unit type).
|
||||
Expr(P<Expr>),
|
||||
|
||||
/// Expr with trailing semi-colon (may have any type):
|
||||
/// An expression with a trailing semi-colon (may have any type).
|
||||
Semi(P<Expr>),
|
||||
}
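
As an informal illustration (not part of the diff), the four statement kinds look like this in ordinary Rust, with the corresponding variant named in comments:

```rust
fn demo() {
    let x = 1;      // StmtKind::Local: a `let` binding
    fn helper() {}  // StmtKind::Item: an item in statement position
    if x > 0 {}     // StmtKind::Expr: expression statement without a trailing `;` (unit type)
    helper();       // StmtKind::Semi: expression statement with a trailing `;`
}

fn main() {
    demo();
}
```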
|
||||
|
||||
|
@ -1179,12 +1180,12 @@ impl StmtKind {
|
|||
}
|
||||
}
|
||||
|
||||
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`
|
||||
/// Represents a `let` statement (i.e., `let <pat>:<ty> = <expr>;`).
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct Local {
|
||||
pub pat: P<Pat>,
|
||||
pub ty: Option<P<Ty>>,
|
||||
/// Initializer expression to set the value, if any
|
||||
/// Initializer expression to set the value, if any.
|
||||
pub init: Option<P<Expr>>,
|
||||
pub id: NodeId,
|
||||
pub hir_id: HirId,
|
||||
|
@ -1193,7 +1194,7 @@ pub struct Local {
|
|||
pub source: LocalSource,
|
||||
}
|
||||
|
||||
/// represents one arm of a 'match'
|
||||
/// Represents a single arm of a `match` expression.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct Arm {
|
||||
pub attrs: HirVec<Attribute>,
|
||||
|
@ -1419,16 +1420,16 @@ impl fmt::Debug for Expr {
|
|||
pub enum ExprKind {
|
||||
/// A `box x` expression.
|
||||
Box(P<Expr>),
|
||||
/// An array (`[a, b, c, d]`)
|
||||
/// An array (e.g., `[a, b, c, d]`).
|
||||
Array(HirVec<Expr>),
|
||||
/// A function call
|
||||
/// A function call.
|
||||
///
|
||||
/// The first field resolves to the function itself (usually an `ExprKind::Path`),
|
||||
/// and the second field is the list of arguments.
|
||||
/// This also represents calling the constructor of
|
||||
/// tuple-like ADTs such as tuple structs and enum variants.
|
||||
Call(P<Expr>, HirVec<Expr>),
|
||||
/// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`)
|
||||
/// A method call (e.g., `x.foo::<'static, Bar, Baz>(a, b, c, d)`).
|
||||
///
|
||||
/// The `PathSegment`/`Span` represent the method name and its generic arguments
|
||||
/// (within the angle brackets).
|
||||
|
@ -1438,63 +1439,64 @@ pub enum ExprKind {
|
|||
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
|
||||
/// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`.
|
||||
MethodCall(PathSegment, Span, HirVec<Expr>),
|
||||
/// A tuple (`(a, b, c ,d)`)
|
||||
/// A tuple (e.g., `(a, b, c, d)`).
|
||||
Tup(HirVec<Expr>),
|
||||
/// A binary operation (For example: `a + b`, `a * b`)
|
||||
/// A binary operation (e.g., `a + b`, `a * b`).
|
||||
Binary(BinOp, P<Expr>, P<Expr>),
|
||||
/// A unary operation (For example: `!x`, `*x`)
|
||||
/// A unary operation (e.g., `!x`, `*x`).
|
||||
Unary(UnOp, P<Expr>),
|
||||
/// A literal (For example: `1`, `"foo"`)
|
||||
/// A literal (e.g., `1`, `"foo"`).
|
||||
Lit(Lit),
|
||||
/// A cast (`foo as f64`)
|
||||
/// A cast (e.g., `foo as f64`).
|
||||
Cast(P<Expr>, P<Ty>),
|
||||
/// A type reference (e.g., `Foo`).
|
||||
Type(P<Expr>, P<Ty>),
|
||||
/// An `if` block, with an optional else block
|
||||
/// An `if` block, with an optional else block.
|
||||
///
|
||||
/// `if expr { expr } else { expr }`
|
||||
/// I.e., `if <expr> { <expr> } else { <expr> }`.
|
||||
If(P<Expr>, P<Expr>, Option<P<Expr>>),
|
||||
/// A while loop, with an optional label
|
||||
///
|
||||
/// `'label: while expr { block }`
|
||||
/// I.e., `'label: while <expr> { <block> }`.
|
||||
While(P<Expr>, P<Block>, Option<Label>),
|
||||
/// Conditionless loop (can be exited with break, continue, or return)
|
||||
/// A conditionless loop (can be exited with `break`, `continue`, or `return`).
|
||||
///
|
||||
/// `'label: loop { block }`
|
||||
/// I.e., `'label: loop { <block> }`.
|
||||
Loop(P<Block>, Option<Label>, LoopSource),
|
||||
/// A `match` block, with a source that indicates whether or not it is
|
||||
/// the result of a desugaring, and if so, which kind.
|
||||
Match(P<Expr>, HirVec<Arm>, MatchSource),
|
||||
/// A closure (for example, `move |a, b, c| {a + b + c}`).
|
||||
/// A closure (e.g., `move |a, b, c| {a + b + c}`).
|
||||
///
|
||||
/// The final span is the span of the argument block `|...|`
|
||||
/// The final span is the span of the argument block `|...|`.
|
||||
///
|
||||
/// This may also be a generator literal, indicated by the final boolean,
|
||||
/// in that case there is an GeneratorClause.
|
||||
/// in which case there is a `GeneratorClause`.
|
||||
Closure(CaptureClause, P<FnDecl>, BodyId, Span, Option<GeneratorMovability>),
|
||||
/// A block (`'label: { ... }`)
|
||||
/// A block (e.g., `'label: { ... }`).
|
||||
Block(P<Block>, Option<Label>),
|
||||
|
||||
/// An assignment (`a = foo()`)
|
||||
/// An assignment (e.g., `a = foo()`).
|
||||
Assign(P<Expr>, P<Expr>),
|
||||
/// An assignment with an operator
|
||||
/// An assignment with an operator.
|
||||
///
|
||||
/// For example, `a += 1`.
|
||||
/// E.g., `a += 1`.
|
||||
AssignOp(BinOp, P<Expr>, P<Expr>),
|
||||
/// Access of a named (`obj.foo`) or unnamed (`obj.0`) struct or tuple field
|
||||
/// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct or tuple field.
|
||||
Field(P<Expr>, Ident),
|
||||
/// An indexing operation (`foo[2]`)
|
||||
/// An indexing operation (`foo[2]`).
|
||||
Index(P<Expr>, P<Expr>),
|
||||
|
||||
/// Path to a definition, possibly containing lifetime or type parameters.
|
||||
Path(QPath),
|
||||
|
||||
/// A referencing operation (`&a` or `&mut a`)
|
||||
/// A referencing operation (i.e., `&a` or `&mut a`).
|
||||
AddrOf(Mutability, P<Expr>),
|
||||
/// A `break`, with an optional label to break
|
||||
/// A `break`, with an optional label to break.
|
||||
Break(Destination, Option<P<Expr>>),
|
||||
/// A `continue`, with an optional label
|
||||
/// A `continue`, with an optional label.
|
||||
Continue(Destination),
|
||||
/// A `return`, with an optional value to be returned
|
||||
/// A `return`, with an optional value to be returned.
|
||||
Ret(Option<P<Expr>>),
|
||||
|
||||
/// Inline assembly (from `asm!`), with its outputs and inputs.
|
||||
|
@ -1512,10 +1514,10 @@ pub enum ExprKind {
|
|||
/// to be repeated; the second is the number of times to repeat it.
|
||||
Repeat(P<Expr>, AnonConst),
|
||||
|
||||
/// A suspension point for generators. This is `yield <expr>` in Rust.
|
||||
/// A suspension point for generators (i.e., `yield <expr>`).
|
||||
Yield(P<Expr>),
|
||||
|
||||
/// Placeholder for an expression that wasn't syntactically well formed in some way.
|
||||
/// A placeholder for an expression that wasn't syntactically well formed in some way.
|
||||
Err,
|
||||
}
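
As an informal illustration (not part of this commit), here are a few surface expressions annotated with the `ExprKind` variant each lowers to:

```rust
// Annotations are informal; they name the main variant for each expression.
fn demo() -> i32 {
    let xs = [1, 2, 3];   // ExprKind::Array
    let t = (4, 5);       // ExprKind::Tup
    let s = t.0 + xs[0];  // ExprKind::Field, ExprKind::Index, ExprKind::Binary
    let n = -s;           // ExprKind::Unary (UnNeg)
    let r = &n;           // ExprKind::AddrOf
    if *r < 0 {           // ExprKind::If (and ExprKind::Unary(UnDeref) for `*r`)
        return 0;         // ExprKind::Ret
    }
    *r as i32             // ExprKind::Cast
}

fn main() {
    println!("{}", demo());
}
```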
|
||||
|
||||
|
@ -1525,12 +1527,12 @@ pub enum QPath {
|
|||
/// Path to a definition, optionally "fully-qualified" with a `Self`
|
||||
/// type, if the path points to an associated item in a trait.
|
||||
///
|
||||
/// e.g., an unqualified path like `Clone::clone` has `None` for `Self`,
|
||||
/// E.g., an unqualified path like `Clone::clone` has `None` for `Self`,
|
||||
/// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`,
|
||||
/// even though they both have the same two-segment `Clone::clone` `Path`.
|
||||
Resolved(Option<P<Ty>>, P<Path>),
|
||||
|
||||
/// Type-related paths, e.g., `<T>::default` or `<T>::Output`.
|
||||
/// Type-related paths (e.g., `<T>::default` or `<T>::Output`).
|
||||
/// Will be resolved by type-checking to an associated item.
|
||||
///
|
||||
/// UFCS source paths can desugar into this, with `Vec::new` turning into
|
||||
|
@ -1539,41 +1541,41 @@ pub enum QPath {
|
|||
TypeRelative(P<Ty>, P<PathSegment>)
|
||||
}
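
As an informal illustration (not from the diff), the two `QPath` cases have these surface forms; the comments name the case each call uses.

```rust
fn demo<T: Default + Clone>(v: &Vec<T>) -> (Vec<T>, Vec<T>, T) {
    let a = Clone::clone(v);              // QPath::Resolved, no Self type
    let b = <Vec<T> as Clone>::clone(v);  // QPath::Resolved with Self = Vec<T>
    let c = <T>::default();               // QPath::TypeRelative, resolved during type-checking
    (a, b, c)
}

fn main() {
    let (_a, _b, _c): (Vec<u8>, Vec<u8>, u8) = demo(&vec![1, 2, 3]);
}
```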
|
||||
|
||||
/// Hints at the original code for a let statement
|
||||
/// Hints at the original code for a let statement.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
|
||||
pub enum LocalSource {
|
||||
/// A `match _ { .. }`
|
||||
/// A `match _ { .. }`.
|
||||
Normal,
|
||||
/// A desugared `for _ in _ { .. }` loop
|
||||
/// A desugared `for _ in _ { .. }` loop.
|
||||
ForLoopDesugar,
|
||||
}
|
||||
|
||||
/// Hints at the original code for a `match _ { .. }`
|
||||
/// Hints at the original code for a `match _ { .. }`.
|
||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||
pub enum MatchSource {
|
||||
/// A `match _ { .. }`
|
||||
/// A `match _ { .. }`.
|
||||
Normal,
|
||||
/// An `if let _ = _ { .. }` (optionally with `else { .. }`)
|
||||
/// An `if let _ = _ { .. }` (optionally with `else { .. }`).
|
||||
IfLetDesugar {
|
||||
contains_else_clause: bool,
|
||||
},
|
||||
/// A `while let _ = _ { .. }` (which was desugared to a
|
||||
/// `loop { match _ { .. } }`)
|
||||
/// `loop { match _ { .. } }`).
|
||||
WhileLetDesugar,
|
||||
/// A desugared `for _ in _ { .. }` loop
|
||||
/// A desugared `for _ in _ { .. }` loop.
|
||||
ForLoopDesugar,
|
||||
/// A desugared `?` operator
|
||||
/// A desugared `?` operator.
|
||||
TryDesugar,
|
||||
}
|
||||
|
||||
/// The loop type that yielded an ExprKind::Loop
|
||||
/// The loop type that yielded an `ExprKind::Loop`.
|
||||
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
|
||||
pub enum LoopSource {
|
||||
/// A `loop { .. }` loop
|
||||
/// A `loop { .. }` loop.
|
||||
Loop,
|
||||
/// A `while let _ = _ { .. }` loop
|
||||
/// A `while let _ = _ { .. }` loop.
|
||||
WhileLet,
|
||||
/// A `for _ in _ { .. }` loop
|
||||
/// A `for _ in _ { .. }` loop.
|
||||
ForLoop,
|
||||
}
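
As a rough sketch (not the compiler's exact lowering) of the desugaring these `*Source` enums record: a `while let` loop becomes, approximately, a `loop { match ... }`, and the lowered nodes carry `MatchSource::WhileLetDesugar` and `LoopSource::WhileLet` so diagnostics can refer back to the original syntax.

```rust
fn demo(v: Vec<i32>) {
    let mut it = v.into_iter();

    // Surface form:
    //     while let Some(x) = it.next() { println!("{}", x); }
    //
    // Approximate desugared form (MatchSource::WhileLetDesugar, LoopSource::WhileLet):
    loop {
        match it.next() {
            Some(x) => println!("{}", x),
            _ => break,
        }
    }
}

fn main() {
    demo(vec![1, 2, 3]);
}
```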
|
||||
|
||||
|
@ -1739,7 +1741,7 @@ impl fmt::Debug for Ty {
|
|||
}
|
||||
}
|
||||
|
||||
/// Not represented directly in the AST, referred to by name through a ty_path.
|
||||
/// Not represented directly in the AST; referred to by name through a `ty_path`.
|
||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||
pub enum PrimTy {
|
||||
Int(IntTy),
|
||||
|
@ -1766,38 +1768,38 @@ pub struct ExistTy {
|
|||
pub impl_trait_fn: Option<DefId>,
|
||||
}
|
||||
|
||||
/// The various kinds of types recognized by the compiler.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
/// The different kinds of types recognized by the compiler
|
||||
pub enum TyKind {
|
||||
/// A variable length slice (`[T]`)
|
||||
/// A variable length slice (i.e., `[T]`).
|
||||
Slice(P<Ty>),
|
||||
/// A fixed length array (`[T; n]`)
|
||||
/// A fixed length array (i.e., `[T; n]`).
|
||||
Array(P<Ty>, AnonConst),
|
||||
/// A raw pointer (`*const T` or `*mut T`)
|
||||
/// A raw pointer (i.e., `*const T` or `*mut T`).
|
||||
Ptr(MutTy),
|
||||
/// A reference (`&'a T` or `&'a mut T`)
|
||||
/// A reference (i.e., `&'a T` or `&'a mut T`).
|
||||
Rptr(Lifetime, MutTy),
|
||||
/// A bare function (e.g., `fn(usize) -> bool`)
|
||||
/// A bare function (e.g., `fn(usize) -> bool`).
|
||||
BareFn(P<BareFnTy>),
|
||||
/// The never type (`!`)
|
||||
/// The never type (`!`).
|
||||
Never,
|
||||
/// A tuple (`(A, B, C, D,...)`)
|
||||
/// A tuple (`(A, B, C, D,...)`).
|
||||
Tup(HirVec<Ty>),
|
||||
/// A path to a type definition (`module::module::...::Type`), or an
|
||||
/// associated type, e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`.
|
||||
/// associated type (e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`).
|
||||
///
|
||||
/// Type parameters may be stored in each `PathSegment`.
|
||||
Path(QPath),
|
||||
/// A type definition itself. This is currently only used for the `existential type`
|
||||
/// item that `impl Trait` in return position desugars to.
|
||||
///
|
||||
/// The generic arg list are the lifetimes (and in the future possibly parameters) that are
|
||||
/// actually bound on the `impl Trait`.
|
||||
/// The generic argument list contains the lifetimes (and in the future possibly parameters)
|
||||
/// that are actually bound on the `impl Trait`.
|
||||
Def(ItemId, HirVec<GenericArg>),
|
||||
/// A trait object type `Bound1 + Bound2 + Bound3`
|
||||
/// where `Bound` is a trait or a lifetime.
|
||||
TraitObject(HirVec<PolyTraitRef>, Lifetime),
|
||||
/// Unused for now
|
||||
/// Unused for now.
|
||||
Typeof(AnonConst),
|
||||
/// `TyKind::Infer` means the type should be inferred instead of it having been
|
||||
/// specified. This can appear anywhere in a type.
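
As an informal illustration (not part of this diff), here are a few type forms written as plain type aliases, annotated with the `TyKind` variant used for the outermost type:

```rust
type Fixed = [u8; 4];                              // TyKind::Array
type RawPtr = *const u8;                           // TyKind::Ptr
type RefMut<'a> = &'a mut Vec<u8>;                 // TyKind::Rptr
type BareFn = fn(usize) -> bool;                   // TyKind::BareFn
type Pair = (u32, bool);                           // TyKind::Tup
type SliceRef<'a> = &'a [u8];                      // TyKind::Rptr wrapping TyKind::Slice
type Assoc = <Vec<u8> as IntoIterator>::IntoIter;  // TyKind::Path (qualified path)

fn main() {
    let _: Pair = (1, true);
}
```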
|
||||
|
@ -1827,7 +1829,7 @@ pub struct InlineAsm {
|
|||
pub ctxt: SyntaxContext,
|
||||
}
|
||||
|
||||
/// represents an argument in a function header
|
||||
/// Represents an argument in a function header.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct Arg {
|
||||
pub pat: P<Pat>,
|
||||
|
@ -1835,7 +1837,7 @@ pub struct Arg {
|
|||
pub hir_id: HirId,
|
||||
}
|
||||
|
||||
/// Represents the header (not the body) of a function declaration
|
||||
/// Represents the header (not the body) of a function declaration.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct FnDecl {
|
||||
pub inputs: HirVec<Ty>,
|
||||
|
@ -1958,7 +1960,7 @@ pub enum FunctionRetTy {
|
|||
/// closures default to inference. Span points to where return
|
||||
/// type would be inserted.
|
||||
DefaultReturn(Span),
|
||||
/// Everything else
|
||||
/// Everything else.
|
||||
Return(P<Ty>),
|
||||
}
|
||||
|
||||
|
@ -2011,7 +2013,7 @@ pub struct VariantKind {
|
|||
pub ident: Ident,
|
||||
pub attrs: HirVec<Attribute>,
|
||||
pub data: VariantData,
|
||||
/// Explicit discriminant, e.g., `Foo = 1`
|
||||
/// Explicit discriminant (e.g., `Foo = 1`).
|
||||
pub disr_expr: Option<AnonConst>,
|
||||
}
|
||||
|
||||
|
@ -2047,7 +2049,7 @@ pub struct TraitRef {
|
|||
}
|
||||
|
||||
impl TraitRef {
|
||||
/// Get the `DefId` of the referenced trait. It _must_ actually be a trait or trait alias.
|
||||
/// Gets the `DefId` of the referenced trait. It _must_ actually be a trait or trait alias.
|
||||
pub fn trait_def_id(&self) -> DefId {
|
||||
match self.path.def {
|
||||
Def::Trait(did) => did,
|
||||
|
@ -2062,10 +2064,10 @@ impl TraitRef {
|
|||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct PolyTraitRef {
|
||||
/// The `'a` in `<'a> Foo<&'a T>`
|
||||
/// The `'a` in `<'a> Foo<&'a T>`.
|
||||
pub bound_generic_params: HirVec<GenericParam>,
|
||||
|
||||
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
|
||||
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`.
|
||||
pub trait_ref: TraitRef,
|
||||
|
||||
pub span: Span,
|
||||
|
@ -2223,7 +2225,7 @@ pub struct FnHeader {
|
|||
pub enum ItemKind {
|
||||
/// An `extern crate` item, with optional *original* crate name if the crate was renamed.
|
||||
///
|
||||
/// e.g., `extern crate foo` or `extern crate foo_bar as foo`
|
||||
/// E.g., `extern crate foo` or `extern crate foo_bar as foo`.
|
||||
ExternCrate(Option<Name>),
|
||||
|
||||
/// `use foo::bar::*;` or `use foo::bar::baz as quux;`
|
||||
|
@ -2320,7 +2322,7 @@ impl ItemKind {
|
|||
/// contains the item's id, naturally, but also the item's name and
|
||||
/// some other high-level details (like whether it is an associated
|
||||
/// type or method, and whether it is public). This allows other
|
||||
/// passes to find the impl they want without loading the id (which
|
||||
/// passes to find the impl they want without loading the ID (which
|
||||
/// means fewer edges in the incremental compilation graph).
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct TraitItemRef {
|
||||
|
@ -2332,10 +2334,10 @@ pub struct TraitItemRef {
|
|||
}
|
||||
|
||||
/// A reference from an impl to one of its associated items. This
|
||||
/// contains the item's id, naturally, but also the item's name and
|
||||
/// contains the item's ID, naturally, but also the item's name and
|
||||
/// some other high-level details (like whether it is an associated
|
||||
/// type or method, and whether it is public). This allows other
|
||||
/// passes to find the impl they want without loading the id (which
|
||||
/// passes to find the impl they want without loading the ID (which
|
||||
/// means fewer edges in the incremental compilation graph).
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct ImplItemRef {
|
||||
|
@ -2366,15 +2368,15 @@ pub struct ForeignItem {
|
|||
pub vis: Visibility,
|
||||
}
|
||||
|
||||
/// An item within an `extern` block
|
||||
/// An item within an `extern` block.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub enum ForeignItemKind {
|
||||
/// A foreign function
|
||||
/// A foreign function.
|
||||
Fn(P<FnDecl>, HirVec<Ident>, Generics),
|
||||
/// A foreign static item (`static ext: u8`), with optional mutability
|
||||
/// (the boolean is true when mutable)
|
||||
/// (the boolean is true when mutable).
|
||||
Static(P<Ty>, bool),
|
||||
/// A foreign type
|
||||
/// A foreign type.
|
||||
Type,
|
||||
}
|
||||
|
||||
|
@ -2458,36 +2460,37 @@ pub struct CodegenFnAttrs {
|
|||
bitflags! {
|
||||
#[derive(RustcEncodable, RustcDecodable)]
|
||||
pub struct CodegenFnAttrFlags: u32 {
|
||||
/// #[cold], a hint to LLVM that this function, when called, is never on
|
||||
/// the hot path
|
||||
/// `#[cold]`: a hint to LLVM that this function, when called, is never on
|
||||
/// the hot path.
|
||||
const COLD = 1 << 0;
|
||||
/// #[allocator], a hint to LLVM that the pointer returned from this
|
||||
/// function is never null
|
||||
/// `#[allocator]`: a hint to LLVM that the pointer returned from this
|
||||
/// function is never null.
|
||||
const ALLOCATOR = 1 << 1;
|
||||
/// #[unwind], an indicator that this function may unwind despite what
|
||||
/// its ABI signature may otherwise imply
|
||||
/// `#[unwind]`: an indicator that this function may unwind despite what
|
||||
/// its ABI signature may otherwise imply.
|
||||
const UNWIND = 1 << 2;
|
||||
/// #[rust_allocator_nounwind], an indicator that an imported FFI
|
||||
/// `#[rust_allocator_nounwind]`: an indicator that an imported FFI
|
||||
/// function will never unwind. Probably obsolete by recent changes with
|
||||
/// #[unwind], but hasn't been removed/migrated yet
|
||||
const RUSTC_ALLOCATOR_NOUNWIND = 1 << 3;
|
||||
/// #[naked], indicates to LLVM that no function prologue/epilogue
|
||||
/// should be generated
|
||||
/// `#[naked]`: an indicator to LLVM that no function prologue/epilogue
|
||||
/// should be generated.
|
||||
const NAKED = 1 << 4;
|
||||
/// #[no_mangle], the function's name should be the same as its symbol
|
||||
/// `#[no_mangle]`: an indicator that the function's name should be the same
|
||||
/// as its symbol.
|
||||
const NO_MANGLE = 1 << 5;
|
||||
/// #[rustc_std_internal_symbol], and indicator that this symbol is a
|
||||
/// `#[rustc_std_internal_symbol]`: an indicator that this symbol is a
|
||||
/// "weird symbol" for the standard library in that it has slightly
|
||||
/// different linkage, visibility, and reachability rules.
|
||||
const RUSTC_STD_INTERNAL_SYMBOL = 1 << 6;
|
||||
/// #[no_debug], indicates that no debugging information should be
|
||||
/// generated for this function by LLVM
|
||||
/// `#[no_debug]`: an indicator that no debugging information should be
|
||||
/// generated for this function by LLVM.
|
||||
const NO_DEBUG = 1 << 7;
|
||||
/// #[thread_local], indicates a static is actually a thread local
|
||||
/// `#[thread_local]`: indicates a static is actually a thread local
|
||||
/// piece of memory
|
||||
const THREAD_LOCAL = 1 << 8;
|
||||
/// #[used], indicates that LLVM can't eliminate this function (but the
|
||||
/// linker can!)
|
||||
/// `#[used]`: indicates that LLVM can't eliminate this function (but the
|
||||
/// linker can!).
|
||||
const USED = 1 << 9;
|
||||
}
|
||||
}
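
The `bitflags!` block above is essentially a packed set of booleans. As a self-contained sketch (names and bit positions here are illustrative, not the compiler's), flags are OR-ed together and queried with a bitwise AND:

```rust
#[derive(Clone, Copy)]
struct Flags(u32);

impl Flags {
    // Hypothetical flags, one bit each, mirroring the style above.
    const COLD: Flags = Flags(1 << 0);
    const NO_MANGLE: Flags = Flags(1 << 5);

    fn union(self, other: Flags) -> Flags {
        Flags(self.0 | other.0)
    }

    fn contains(self, other: Flags) -> bool {
        self.0 & other.0 == other.0
    }
}

fn main() {
    let attrs = Flags::COLD.union(Flags::NO_MANGLE);
    assert!(attrs.contains(Flags::NO_MANGLE));
    assert!(!attrs.contains(Flags(1 << 2)));
}
```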
|
||||
|
@ -2506,7 +2509,7 @@ impl CodegenFnAttrs {
|
|||
}
|
||||
}
|
||||
|
||||
/// True if `#[inline]` or `#[inline(always)]` is present.
|
||||
/// Returns `true` if `#[inline]` or `#[inline(always)]` is present.
|
||||
pub fn requests_inline(&self) -> bool {
|
||||
match self.inline {
|
||||
InlineAttr::Hint | InlineAttr::Always => true,
|
||||
|
|
|
@ -129,7 +129,7 @@ impl hir::Pat {
|
|||
}
|
||||
}
|
||||
|
||||
/// Return variants that are necessary to exist for the pattern to match.
|
||||
/// Returns variants that are necessary to exist for the pattern to match.
|
||||
pub fn necessary_variants(&self) -> Vec<DefId> {
|
||||
let mut variants = vec![];
|
||||
self.walk(|p| {
|
||||
|
@ -154,11 +154,9 @@ impl hir::Pat {
|
|||
|
||||
/// Checks if the pattern contains any `ref` or `ref mut` bindings, and if
|
||||
/// yes whether it contains mutable or just immutables ones.
|
||||
///
|
||||
/// FIXME(tschottdorf): this is problematic as the HIR is being scraped, but
|
||||
/// ref bindings are be implicit after #42640 (default match binding modes).
|
||||
///
|
||||
/// See #44848.
|
||||
//
|
||||
// FIXME(tschottdorf): this is problematic as the HIR is being scraped, but
|
||||
// ref bindings may be implicit after #42640 (default match binding modes). See issue #44848.
|
||||
pub fn contains_explicit_ref_binding(&self) -> Option<hir::Mutability> {
|
||||
let mut result = None;
|
||||
self.each_binding(|annotation, _, _, _| {
|
||||
|
|
|
@ -78,7 +78,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Make `a <: b` where `a` may or may not be expected
|
||||
/// Makes `a <: b`, where `a` may or may not be expected.
|
||||
pub fn sub_exp<T>(self,
|
||||
a_is_expected: bool,
|
||||
a: T,
|
||||
|
@ -89,7 +89,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
self.trace_exp(a_is_expected, a, b).sub(&a, &b)
|
||||
}
|
||||
|
||||
/// Make `actual <: expected`. For example, if type-checking a
|
||||
/// Makes `actual <: expected`. For example, if type-checking a
|
||||
/// call like `foo(x)`, where `foo: fn(i32)`, you might have
|
||||
/// `sup(i32, x)`, since the "expected" type is the type that
|
||||
/// appears in the signature.
|
||||
|
@ -102,7 +102,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
self.sub_exp(false, actual, expected)
|
||||
}
|
||||
|
||||
/// Make `expected <: actual`
|
||||
/// Makes `expected <: actual`.
|
||||
pub fn sub<T>(self,
|
||||
expected: T,
|
||||
actual: T)
|
||||
|
@ -112,7 +112,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
self.sub_exp(true, expected, actual)
|
||||
}
|
||||
|
||||
/// Make `expected <: actual`
|
||||
/// Makes `a == b`, where `a` may or may not be expected.
|
||||
pub fn eq_exp<T>(self,
|
||||
a_is_expected: bool,
|
||||
a: T,
|
||||
|
@ -123,7 +123,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
self.trace_exp(a_is_expected, a, b).eq(&a, &b)
|
||||
}
|
||||
|
||||
/// Make `expected <: actual`
|
||||
/// Makes `expected == actual`.
|
||||
pub fn eq<T>(self,
|
||||
expected: T,
|
||||
actual: T)
|
||||
|
@ -155,7 +155,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Compute the least-upper-bound, or mutual supertype, of two
|
||||
/// Computes the least-upper-bound, or mutual supertype, of two
|
||||
/// values. The order of the arguments doesn't matter, but since
|
||||
/// this can result in an error (e.g., if asked to compute LUB of
|
||||
/// u32 and i32), it is meaningful to call one of them the
|
||||
|
@ -169,7 +169,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
self.trace(expected, actual).lub(&expected, &actual)
|
||||
}
|
||||
|
||||
/// Compute the greatest-lower-bound, or mutual subtype, of two
|
||||
/// Computes the greatest-lower-bound, or mutual subtype, of two
|
||||
/// values. As with `lub` order doesn't matter, except for error
|
||||
/// cases.
|
||||
pub fn glb<T>(self,
|
||||
|
@ -210,9 +210,9 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> {
|
||||
/// Make `a <: b` where `a` may or may not be expected (if
|
||||
/// Makes `a <: b` where `a` may or may not be expected (if
|
||||
/// `a_is_expected` is true, then `a` is expected).
|
||||
/// Make `expected <: actual`
|
||||
/// Makes `expected <: actual`.
|
||||
pub fn sub<T>(self,
|
||||
a: &T,
|
||||
b: &T)
|
||||
|
@ -229,7 +229,7 @@ impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> {
|
|||
})
|
||||
}
|
||||
|
||||
/// Make `a == b`; the expectation is set by the call to
|
||||
/// Makes `a == b`; the expectation is set by the call to
|
||||
/// `trace()`.
|
||||
pub fn eq<T>(self,
|
||||
a: &T,
|
||||
|
|
|
@ -117,9 +117,9 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
|
|||
/// #33684), so if we are performing an operation that may need to
|
||||
/// prove "leak-check" related things, we leave `'static`
|
||||
/// alone.
|
||||
///
|
||||
/// FIXME(#48536) -- once we have universes, we can remove this and just use
|
||||
/// `canonicalize_query`.
|
||||
//
|
||||
// FIXME(#48536): once we have universes, we can remove this and just use
|
||||
// `canonicalize_query`.
|
||||
pub fn canonicalize_hr_query_hack<V>(
|
||||
&self,
|
||||
value: &V,
|
||||
|
@ -595,7 +595,7 @@ impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
|
|||
.var_universe(vid)
|
||||
}
|
||||
|
||||
/// Create a canonical variable (with the given `info`)
|
||||
/// Creates a canonical variable (with the given `info`)
|
||||
/// representing the region `r`; return a region referencing it.
|
||||
fn canonical_var_for_region(
|
||||
&mut self,
|
||||
|
|
|
@ -424,7 +424,7 @@ impl<'tcx> CanonicalVarValues<'tcx> {
|
|||
self.var_values.len()
|
||||
}
|
||||
|
||||
/// Make an identity substitution from this one: each bound var
|
||||
/// Makes an identity substitution from this one: each bound var
|
||||
/// is matched to the same bound var, preserving the original kinds.
|
||||
/// For example, if we have:
|
||||
/// `self.var_values == [Type(u32), Lifetime('a), Type(u64)]`
|
||||
|
|
|
@ -119,7 +119,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
|
|||
/// If you DO want to keep track of pending obligations (which
|
||||
/// include all region obligations, so this includes all cases
|
||||
/// that care about regions) with this function, you have to
|
||||
/// do it yourself, by e.g. having them be a part of the answer.
|
||||
/// do it yourself, e.g., by having them be a part of the answer.
|
||||
pub fn make_query_response_ignoring_pending_obligations<T>(
|
||||
&self,
|
||||
inference_vars: CanonicalVarValues<'tcx>,
|
||||
|
|
|
@ -165,8 +165,8 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> {
|
|||
Glb::new(self, a_is_expected)
|
||||
}
|
||||
|
||||
/// Here dir is either EqTo, SubtypeOf, or SupertypeOf. The
|
||||
/// idea is that we should ensure that the type `a_ty` is equal
|
||||
/// Here, `dir` is either `EqTo`, `SubtypeOf`, or `SupertypeOf`.
|
||||
/// The idea is that we should ensure that the type `a_ty` is equal
|
||||
/// to, a subtype of, or a supertype of (respectively) the type
|
||||
/// to which `b_vid` is bound.
|
||||
///
|
||||
|
@ -280,7 +280,7 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> {
|
|||
struct Generalizer<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> {
|
||||
infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
|
||||
|
||||
/// Span, used when creating new type variables and things.
|
||||
/// The span, used when creating new type variables and things.
|
||||
span: Span,
|
||||
|
||||
/// The vid of the type variable that is in the process of being
|
||||
|
|
|
@ -659,7 +659,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
None
|
||||
}
|
||||
|
||||
/// Add a `,` to the type representation only if it is appropriate.
|
||||
/// Adds a `,` to the type representation only if it is appropriate.
|
||||
fn push_comma(
|
||||
&self,
|
||||
value: &mut DiagnosticStyledString,
|
||||
|
@ -715,7 +715,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
substs.truncate_to(self.tcx, &generics)
|
||||
}
|
||||
|
||||
/// Compare two given types, eliding parts that are the same between them and highlighting
|
||||
/// Compares two given types, eliding parts that are the same between them and highlighting
|
||||
/// relevant differences, and return two representation of those types for highlighted printing.
|
||||
fn cmp(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> (DiagnosticStyledString, DiagnosticStyledString) {
|
||||
fn equals<'tcx>(a: &Ty<'tcx>, b: &Ty<'tcx>) -> bool {
|
||||
|
|
|
@ -39,7 +39,7 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
|
|||
/// x.push(y);
|
||||
/// ^ ...but data from `y` flows into `x` here
|
||||
/// }
|
||||
/// ````
|
||||
/// ```
|
||||
///
|
||||
/// It will later be extended to trait objects.
|
||||
pub(super) fn try_report_anon_anon_conflict(&self) -> Option<ErrorReported> {
|
||||
|
|
|
@ -54,7 +54,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||
/// Replace all regions (resp. types) bound by `binder` with placeholder
|
||||
/// Replaces all regions (resp. types) bound by `binder` with placeholder
|
||||
/// regions (resp. types) and return a map indicating which bound-region
|
||||
/// placeholder region. This is the first step of checking subtyping
|
||||
/// when higher-ranked things are involved.
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
//! The code to do lexical region resolution.
|
||||
//! Lexical region resolution.
|
||||
|
||||
use crate::infer::region_constraints::Constraint;
|
||||
use crate::infer::region_constraints::GenericKind;
|
||||
|
@ -492,11 +492,11 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
|
|||
match *value {
|
||||
VarValue::Value(_) => { /* Inference successful */ }
|
||||
VarValue::ErrorValue => {
|
||||
/* Inference impossible, this value contains
|
||||
/* Inference impossible: this value contains
|
||||
inconsistent constraints.
|
||||
|
||||
I think that in this case we should report an
|
||||
error now---unlike the case above, we can't
|
||||
error now -- unlike the case above, we can't
|
||||
wait to see whether the user needs the result
|
||||
of this variable. The reason is that the mere
|
||||
existence of this variable implies that the
|
||||
|
|
|
@ -221,7 +221,7 @@ pub struct InferCtxt<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
|
|||
/// replaced with.
|
||||
pub type PlaceholderMap<'tcx> = BTreeMap<ty::BoundRegion, ty::Region<'tcx>>;
|
||||
|
||||
/// See `error_reporting` module for more details
|
||||
/// See the `error_reporting` module for more details.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ValuePairs<'tcx> {
|
||||
Types(ExpectedFound<Ty<'tcx>>),
|
||||
|
@ -233,7 +233,7 @@ pub enum ValuePairs<'tcx> {
|
|||
/// The trace designates the path through inference that we took to
|
||||
/// encounter an error or subtyping constraint.
|
||||
///
|
||||
/// See `error_reporting` module for more details.
|
||||
/// See the `error_reporting` module for more details.
|
||||
#[derive(Clone)]
|
||||
pub struct TypeTrace<'tcx> {
|
||||
cause: ObligationCause<'tcx>,
|
||||
|
@ -454,9 +454,9 @@ impl fmt::Display for FixupError {
|
|||
}
|
||||
}
|
||||
|
||||
/// Helper type of a temporary returned by tcx.infer_ctxt().
|
||||
/// Helper type of a temporary returned by `tcx.infer_ctxt()`.
|
||||
/// Necessary because we can't write the following bound:
|
||||
/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>).
|
||||
/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>)`.
|
||||
pub struct InferCtxtBuilder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
|
||||
global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
|
||||
arena: SyncDroplessArena,
|
||||
|
@ -563,7 +563,7 @@ impl<'tcx, T> InferOk<'tcx, T> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Extract `value`, registering any obligations into `fulfill_cx`
|
||||
/// Extracts `value`, registering any obligations into `fulfill_cx`.
|
||||
pub fn into_value_registering_obligations(
|
||||
self,
|
||||
infcx: &InferCtxt<'_, '_, 'tcx>,
|
||||
|
@ -794,7 +794,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
.commit(region_constraints_snapshot);
|
||||
}
|
||||
|
||||
/// Execute `f` and commit the bindings
|
||||
/// Executes `f` and commits the bindings.
|
||||
pub fn commit_unconditionally<R, F>(&self, f: F) -> R
|
||||
where
|
||||
F: FnOnce() -> R,
|
||||
|
@ -806,7 +806,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
r
|
||||
}
|
||||
|
||||
/// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`
|
||||
/// Executes `f` and commits the bindings if closure `f` returns `Ok(_)`.
|
||||
pub fn commit_if_ok<T, E, F>(&self, f: F) -> Result<T, E>
|
||||
where
|
||||
F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result<T, E>,
|
||||
|
@ -838,7 +838,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
r
|
||||
}
|
||||
|
||||
/// Execute `f` then unroll any bindings it creates
|
||||
/// Executes `f`, then unrolls any bindings it creates.
|
||||
pub fn probe<R, F>(&self, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R,
|
||||
|
@ -996,14 +996,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
self.float_unification_table.borrow_mut().new_key(None)
|
||||
}
|
||||
|
||||
/// Create a fresh region variable with the next available index.
|
||||
/// Creates a fresh region variable with the next available index.
|
||||
/// The variable will be created in the maximum universe created
|
||||
/// thus far, allowing it to name any region created thus far.
|
||||
pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> {
|
||||
self.next_region_var_in_universe(origin, self.universe())
|
||||
}
|
||||
|
||||
/// Create a fresh region variable with the next available index
|
||||
/// Creates a fresh region variable with the next available index
|
||||
/// in the given universe; typically, you can use
|
||||
/// `next_region_var` and just use the maximal universe.
|
||||
pub fn next_region_var_in_universe(
|
||||
|
@ -1069,7 +1069,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
Substs::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param))
|
||||
}
|
||||
|
||||
/// True if errors have been reported since this infcx was
|
||||
/// Returns `true` if errors have been reported since this infcx was
|
||||
/// created. This is sometimes used as a heuristic to skip
|
||||
/// reporting errors that often occur as a result of earlier
|
||||
/// errors, but where it's hard to be 100% sure (e.g., unresolved
|
||||
|
@ -1278,7 +1278,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
value.fold_with(&mut r)
|
||||
}
|
||||
|
||||
/// Returns true if `T` contains unresolved type variables. In the
|
||||
/// Returns `true` if `T` contains unresolved type variables. In the
|
||||
/// process of visiting `T`, this will resolve (where possible)
|
||||
/// type variables in `T`, but it never constructs the final,
|
||||
/// resolved type, so it's more efficient than
|
||||
|
@ -1369,7 +1369,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
self.tcx.replace_bound_vars(value, fld_r, fld_t)
|
||||
}
|
||||
|
||||
/// See `verify_generic_bound` method in `region_constraints`
|
||||
/// See the [`region_constraints::verify_generic_bound`] method.
|
||||
pub fn verify_generic_bound(
|
||||
&self,
|
||||
origin: SubregionOrigin<'tcx>,
|
||||
|
@ -1466,8 +1466,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
|
||||
/// Clears the selection, evaluation, and projection caches. This is useful when
|
||||
/// repeatedly attempting to select an Obligation while changing only
|
||||
/// its ParamEnv, since FulfillmentContext doesn't use 'probe'
|
||||
/// repeatedly attempting to select an `Obligation` while changing only
|
||||
/// its `ParamEnv`, since `FulfillmentContext` doesn't use probing.
|
||||
pub fn clear_caches(&self) {
|
||||
self.selection_cache.clear();
|
||||
self.evaluation_cache.clear();
|
||||
|
@ -1478,7 +1478,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
self.universe.get()
|
||||
}
|
||||
|
||||
/// Create and return a fresh universe that extends all previous
|
||||
/// Creates and returns a fresh universe that extends all previous
|
||||
/// universes. Updates `self.universe` to that new universe.
|
||||
pub fn create_next_universe(&self) -> ty::UniverseIndex {
|
||||
let u = self.universe.get().next_universe();
|
||||
|
|
|
@ -47,17 +47,17 @@ where
|
|||
|
||||
/// How are we relating `a` and `b`?
|
||||
///
|
||||
/// - covariant means `a <: b`
|
||||
/// - contravariant means `b <: a`
|
||||
/// - invariant means `a == b
|
||||
/// - bivariant means that it doesn't matter
|
||||
/// - Covariant means `a <: b`.
|
||||
/// - Contravariant means `b <: a`.
|
||||
/// - Invariant means `a == b`.
|
||||
/// - Bivariant means that it doesn't matter.
|
||||
ambient_variance: ty::Variance,
|
||||
|
||||
/// When we pass through a set of binders (e.g., when looking into
|
||||
/// a `fn` type), we push a new bound region scope onto here. This
|
||||
/// will contain the instantiated region for each region in those
|
||||
/// binders. When we then encounter a `ReLateBound(d, br)`, we can
|
||||
/// use the debruijn index `d` to find the right scope, and then
|
||||
/// use the De Bruijn index `d` to find the right scope, and then
|
||||
/// bound region name `br` to find the specific instantiation from
|
||||
/// within that scope. See `replace_bound_region`.
|
||||
///
|
||||
|
@ -114,7 +114,7 @@ pub trait TypeRelatingDelegate<'tcx> {
|
|||
/// Define the normalization strategy to use, eager or lazy.
|
||||
fn normalization() -> NormalizationStrategy;
|
||||
|
||||
/// Enable some optimizations if we do not expect inference variables
|
||||
/// Enables some optimizations if we do not expect inference variables
|
||||
/// in the RHS of the relation.
|
||||
fn forbid_inference_vars() -> bool;
|
||||
}
|
||||
|
@ -208,7 +208,7 @@ where
|
|||
/// When we encounter binders during the type traversal, we record
|
||||
/// the value to substitute for each of the things contained in
|
||||
/// that binder. (This will be either a universal placeholder or
|
||||
/// an existential inference variable.) Given the debruijn index
|
||||
/// an existential inference variable.) Given the De Bruijn index
|
||||
/// `debruijn` (and name `br`) of some binder we have now
|
||||
/// encountered, this routine finds the value that we instantiated
|
||||
/// the region with; to do so, it indexes backwards into the list
|
||||
|
|
|
@ -46,7 +46,7 @@ pub struct OpaqueTypeDecl<'tcx> {
|
|||
/// lifetime parameter on `foo`.)
|
||||
pub concrete_ty: Ty<'tcx>,
|
||||
|
||||
/// True if the `impl Trait` bounds include region bounds.
|
||||
/// Returns `true` if the `impl Trait` bounds include region bounds.
|
||||
/// For example, this would be true for:
|
||||
///
|
||||
/// fn foo<'a, 'b, 'c>() -> impl Trait<'c> + 'a + 'b
|
||||
|
@ -71,7 +71,7 @@ pub struct OpaqueTypeDecl<'tcx> {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||
/// Replace all opaque types in `value` with fresh inference variables
|
||||
/// Replaces all opaque types in `value` with fresh inference variables
|
||||
/// and creates appropriate obligations. For example, given the input:
|
||||
///
|
||||
/// impl Iterator<Item = impl Debug>
|
||||
|
@ -88,7 +88,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// - `parent_def_id` -- the def-id of the function in which the opaque type
|
||||
/// - `parent_def_id` -- the `DefId` of the function in which the opaque type
|
||||
/// is defined
|
||||
/// - `body_id` -- the body-id with which the resulting obligations should
|
||||
/// be associated
|
||||
|
@ -813,7 +813,7 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Whether `opaque_node_id` is a sibling or a child of a sibling of `def_id`
|
||||
/// Returns `true` if `opaque_node_id` is a sibling or a child of a sibling of `def_id`.
|
||||
///
|
||||
/// ```rust
|
||||
/// pub mod foo {
|
||||
|
@ -827,11 +827,10 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> {
|
|||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Here, `def_id` will be the `DefId` of the existential type `Baz`.
|
||||
/// `opaque_node_id` is the `NodeId` of the reference to Baz --
|
||||
/// so either the return type of f1 or f2.
|
||||
/// We will return true if the reference is within the same module as the existential type
|
||||
/// So true for f1, false for f2.
|
||||
/// Here, `def_id` is the `DefId` of the existential type `Baz` and `opaque_node_id` is the
|
||||
/// `NodeId` of the reference to `Baz` (i.e., the return type of both `f1` and `f2`).
|
||||
/// We return `true` if the reference is within the same module as the existential type
|
||||
/// (i.e., `true` for `f1`, `false` for `f2`).
|
||||
pub fn may_define_existential_type(
|
||||
tcx: TyCtxt<'_, '_, '_>,
|
||||
def_id: DefId,
|
||||
|
|
|
@ -63,7 +63,7 @@ pub struct OutlivesEnvironment<'tcx> {
|
|||
}
|
||||
|
||||
/// "Region-bound pairs" tracks outlives relations that are known to
|
||||
/// be true, either because of explicit where clauses like `T: 'a` or
|
||||
/// be true, either because of explicit where-clauses like `T: 'a` or
|
||||
/// because of implied bounds.
|
||||
pub type RegionBoundPairs<'tcx> = Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>;
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ impl<'tcx> FreeRegionMap<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Compute the least-upper-bound of two free regions. In some
|
||||
/// Computes the least-upper-bound of two free regions. In some
|
||||
/// cases, this is more conservative than necessary, in order to
|
||||
/// avoid making arbitrary choices. See
|
||||
/// `TransitiveRelation::postdom_upper_bound` for more details.
|
||||
|
|
|
@ -96,7 +96,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> {
|
|||
})
|
||||
}
|
||||
|
||||
/// Searches the where clauses in scope for regions that
|
||||
/// Searches the where-clauses in scope for regions that
|
||||
/// `projection_ty` is known to outlive. Currently requires an
|
||||
/// exact match.
|
||||
pub fn projection_declared_bounds_from_trait(
|
||||
|
@ -251,7 +251,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> {
|
|||
.map(move |r| r.subst(tcx, projection_ty.substs))
|
||||
}
|
||||
|
||||
/// Given the def-id of an associated item, returns any region
|
||||
/// Given the `DefId` of an associated item, returns any region
|
||||
/// bounds attached to that associated item from the trait definition.
|
||||
///
|
||||
/// For example:
|
||||
|
@ -262,7 +262,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> {
|
|||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// If we were given the def-id of `Foo::Bar`, we would return
|
||||
/// If we were given the `DefId` of `Foo::Bar`, we would return
|
||||
/// `'a`. You could then apply the substitutions from the
|
||||
/// projection to convert this into your namespace. This also
|
||||
/// works if the user writes `where <Self as Foo<'a>>::Bar: 'a` on
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
//! See README.md
|
||||
//! See `README.md`.
|
||||
|
||||
use self::CombineMapType::*;
|
||||
use self::UndoLog::*;
|
||||
|
@ -108,16 +108,16 @@ pub struct RegionConstraintData<'tcx> {
|
|||
pub givens: FxHashSet<(Region<'tcx>, ty::RegionVid)>,
|
||||
}
|
||||
|
||||
/// A constraint that influences the inference process.
|
||||
/// Represents a constraint that influences the inference process.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
|
||||
pub enum Constraint<'tcx> {
|
||||
/// One region variable is subregion of another
|
||||
/// A region variable is a subregion of another.
|
||||
VarSubVar(RegionVid, RegionVid),
|
||||
|
||||
/// Concrete region is subregion of region variable
|
||||
/// A concrete region is a subregion of region variable.
|
||||
RegSubVar(Region<'tcx>, RegionVid),
|
||||
|
||||
/// Region variable is subregion of concrete region. This does not
|
||||
/// A region variable is a subregion of a concrete region. This does not
|
||||
/// directly affect inference, but instead is checked after
|
||||
/// inference is complete.
|
||||
VarSubReg(RegionVid, Region<'tcx>),
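
At the source level, the subregion relationships these constraints track correspond to outlives bounds. As an informal illustration (not part of the commit): in `'a: 'b`, `'a` outlives `'b`, i.e., `'b` is a subregion of `'a`, which is what lets a longer-lived reference be used where a shorter-lived one is expected.

```rust
// Because 'a: 'b, a `&'a str` can be returned where a `&'b str` is expected;
// region inference records and checks exactly this kind of subregion constraint.
fn shorten<'a: 'b, 'b>(x: &'a str) -> &'b str {
    x
}

fn main() {
    let s = String::from("hello");
    let r: &str = shorten(&s);
    println!("{}", r);
}
```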
|
||||
|
@ -138,9 +138,9 @@ impl Constraint<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
/// VerifyGenericBound(T, _, R, RS): The parameter type `T` (or
|
||||
/// `VerifyGenericBound(T, _, R, RS)`: the parameter type `T` (or
|
||||
/// associated type) must outlive the region `R`. `T` is known to
|
||||
/// outlive `RS`. Therefore verify that `R <= RS[i]` for some
|
||||
/// outlive `RS`. Therefore, verify that `R <= RS[i]` for some
|
||||
/// `i`. Inference variables may be involved (but this verification
|
||||
/// step doesn't influence inference).
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -164,7 +164,7 @@ EnumTypeFoldableImpl! {
|
|||
}
|
||||
}
|
||||
|
||||
/// Describes the things that some `GenericKind` value G is known to
|
||||
/// Describes the things that some `GenericKind` value `G` is known to
|
||||
/// outlive. Each variant of `VerifyBound` can be thought of as a
|
||||
/// function:
|
||||
///
|
||||
|
@ -187,6 +187,7 @@ pub enum VerifyBound<'tcx> {
|
|||
/// following, where `G` is the generic for which this verify
|
||||
/// bound was created:
|
||||
///
|
||||
/// ```rust
|
||||
/// fn(min) -> bool {
|
||||
/// if G == K {
|
||||
/// B(min)
|
||||
|
@ -194,6 +195,7 @@ pub enum VerifyBound<'tcx> {
|
|||
/// false
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// In other words, if the generic `G` that we are checking is
|
||||
/// equal to `K`, then check the associated verify bound
|
||||
|
@ -202,14 +204,16 @@ pub enum VerifyBound<'tcx> {
|
|||
/// This is used when we have something in the environment that
|
||||
/// may or may not be relevant, depending on the region inference
|
||||
/// results. For example, we may have `where <T as
|
||||
/// Trait<'a>>::Item: 'b` in our where clauses. If we are
|
||||
/// Trait<'a>>::Item: 'b` in our where-clauses. If we are
|
||||
/// generating the verify-bound for `<T as Trait<'0>>::Item`, then
|
||||
/// this where-clause is only relevant if `'0` winds up inferred
|
||||
/// to `'a`.
|
||||
///
|
||||
/// So we would compile to a verify-bound like
|
||||
///
|
||||
/// ```
|
||||
/// IfEq(<T as Trait<'a>>::Item, AnyRegion('a))
|
||||
/// ```
|
||||
///
|
||||
/// meaning, if the subject G is equal to `<T as Trait<'a>>::Item`
|
||||
/// (after inference), and `'a: min`, then `G: min`.
|
||||
|
@ -217,9 +221,11 @@ pub enum VerifyBound<'tcx> {
|
|||
|
||||
/// Given a region `R`, expands to the function:
|
||||
///
|
||||
/// ```
|
||||
/// fn(min) -> bool {
|
||||
/// R: min
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// This is used when we can establish that `G: R` -- therefore,
|
||||
/// if `R: min`, then by transitivity `G: min`.
|
||||
|
@ -227,20 +233,23 @@ pub enum VerifyBound<'tcx> {
|
|||
|
||||
/// Given a set of bounds `B`, expands to the function:
|
||||
///
|
||||
/// ```rust
|
||||
/// fn(min) -> bool {
|
||||
/// exists (b in B) { b(min) }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// In other words, if we meet some bound in `B`, that suffices.
|
||||
/// This is used when all the bounds in `B` are known to apply to
|
||||
/// G.
|
||||
/// This is used when all the bounds in `B` are known to apply to `G`.
|
||||
AnyBound(Vec<VerifyBound<'tcx>>),
|
||||
|
||||
/// Given a set of bounds `B`, expands to the function:
|
||||
///
|
||||
/// ```rust
|
||||
/// fn(min) -> bool {
|
||||
/// forall (b in B) { b(min) }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// In other words, if we meet *all* bounds in `B`, that suffices.
|
||||
/// This is used when *some* bound in `B` is known to suffice, but
|
||||
|
@ -256,19 +265,19 @@ struct TwoRegions<'tcx> {
|
|||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
enum UndoLog<'tcx> {
|
||||
/// We added `RegionVid`
|
||||
/// We added `RegionVid`.
|
||||
AddVar(RegionVid),
|
||||
|
||||
/// We added the given `constraint`
|
||||
/// We added the given `constraint`.
|
||||
AddConstraint(Constraint<'tcx>),
|
||||
|
||||
/// We added the given `verify`
|
||||
/// We added the given `verify`.
|
||||
AddVerify(usize),
|
||||
|
||||
/// We added the given `given`
|
||||
/// We added the given `given`.
|
||||
AddGiven(Region<'tcx>, ty::RegionVid),
|
||||
|
||||
/// We added a GLB/LUB "combination variable"
|
||||
/// We added a GLB/LUB "combination variable".
|
||||
AddCombination(CombineMapType, TwoRegions<'tcx>),
|
||||
|
||||
/// During skolemization, we sometimes purge entries from the undo
|
||||
|
@ -707,7 +716,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// See `Verify::VerifyGenericBound`
|
||||
/// See [`Verify::VerifyGenericBound`].
|
||||
pub fn verify_generic_bound(
|
||||
&mut self,
|
||||
origin: SubregionOrigin<'tcx>,
|
||||
|
@ -837,7 +846,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
|
|||
}).collect()
|
||||
}
|
||||
|
||||
/// See [`RegionInference::region_constraints_added_in_snapshot`]
|
||||
/// See [`RegionInference::region_constraints_added_in_snapshot`].
|
||||
pub fn region_constraints_added_in_snapshot(&self, mark: &RegionSnapshot) -> Option<bool> {
|
||||
self.undo_log[mark.length..]
|
||||
.iter()
|
||||
|
@ -925,7 +934,8 @@ impl<'a, 'gcx, 'tcx> VerifyBound<'tcx> {
|
|||
}
|
||||
|
||||
impl<'tcx> RegionConstraintData<'tcx> {
|
||||
/// True if this region constraint data contains no constraints.
|
||||
/// Returns `true` if this region constraint data contains no constraints, and `false`
|
||||
/// otherwise.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
let RegionConstraintData {
|
||||
constraints,
|
||||
|
|
|
@ -218,7 +218,7 @@ impl<'tcx> TypeVariableTable<'tcx> {
|
|||
self.sub_relations.find(vid)
|
||||
}
|
||||
|
||||
/// True if `a` and `b` have same "sub-root" (i.e., exists some
|
||||
/// Returns `true` if `a` and `b` have the same "sub-root" (i.e., exists some
|
||||
/// type X such that `forall i in {a, b}. (i <: X || X <: i)`.
|
||||
pub fn sub_unified(&mut self, a: ty::TyVid, b: ty::TyVid) -> bool {
|
||||
self.sub_root_var(a) == self.sub_root_var(b)
|
||||
|
@ -306,7 +306,7 @@ impl<'tcx> TypeVariableTable<'tcx> {
|
|||
.collect()
|
||||
}
|
||||
|
||||
/// Find the set of type variables that existed *before* `s`
|
||||
/// Finds the set of type variables that existed *before* `s`
|
||||
/// but which have only been unified since `s` started, and
|
||||
/// return the types with which they were unified. So if we had
|
||||
/// a type variable `V0`, then we started the snapshot, then we
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
//! The lint checking is mostly consolidated into one pass which runs
|
||||
//! after all other analyses. Throughout compilation, lint warnings
|
||||
//! can be added via the `add_lint` method on the Session structure. This
|
||||
//! requires a span and an id of the node that the lint is being added to. The
|
||||
//! requires a span and an ID of the node that the lint is being added to. The
|
||||
//! lint isn't actually emitted at that time because it is unknown what the
|
||||
//! actual lint level at that location is.
|
||||
//!
|
||||
|
@ -703,7 +703,7 @@ impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> {
|
|||
impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> {
|
||||
type PassObject = LateLintPassObject;
|
||||
|
||||
/// Get the overall compiler `Session` object.
|
||||
/// Gets the overall compiler `Session` object.
|
||||
fn sess(&self) -> &Session {
|
||||
&self.tcx.sess
|
||||
}
|
||||
|
@ -736,7 +736,7 @@ impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> {
|
|||
impl<'a> LintContext<'a> for EarlyContext<'a> {
|
||||
type PassObject = EarlyLintPassObject;
|
||||
|
||||
/// Get the overall compiler `Session` object.
|
||||
/// Gets the overall compiler `Session` object.
|
||||
fn sess(&self) -> &Session {
|
||||
&self.sess
|
||||
}
|
||||
|
@ -1200,7 +1200,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
|
|||
}
|
||||
|
||||
|
||||
/// Perform lint checking on a crate.
|
||||
/// Performs lint checking on a crate.
|
||||
///
|
||||
/// Consumes the `lint_store` field of the `Session`.
|
||||
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
||||
|
|
|
@ -72,7 +72,7 @@ pub struct Lint {
|
|||
/// `default_level`.
|
||||
pub edition_lint_opts: Option<(Edition, Level)>,
|
||||
|
||||
/// Whether this lint is reported even inside expansions of external macros
|
||||
/// `true` if this lint is reported even inside expansions of external macros.
|
||||
pub report_in_external_macro: bool,
|
||||
}
|
||||
|
||||
|
@ -86,7 +86,7 @@ impl Lint {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the lint's name, with ASCII letters converted to lowercase.
|
||||
/// Gets the lint's name, with ASCII letters converted to lowercase.
|
||||
pub fn name_lower(&self) -> String {
|
||||
self.name.to_ascii_lowercase()
|
||||
}
|
||||
|
@ -99,7 +99,7 @@ impl Lint {
|
|||
}
|
||||
}
|
||||
|
||||
/// Declare a static item of type `&'static Lint`.
|
||||
/// Declares a static item of type `&'static Lint`.
|
||||
#[macro_export]
|
||||
macro_rules! declare_lint {
|
||||
($vis: vis $NAME: ident, $Level: ident, $desc: expr) => (
|
||||
|
@ -150,7 +150,7 @@ macro_rules! declare_tool_lint {
|
|||
);
|
||||
}
|
||||
|
||||
/// Declare a static `LintArray` and return it as an expression.
|
||||
/// Declares a static `LintArray` and returns it as an expression.
|
||||
#[macro_export]
|
||||
macro_rules! lint_array {
|
||||
($( $lint:expr ),* ,) => { lint_array!( $($lint),* ) };
|
||||
|
@ -164,7 +164,7 @@ pub type LintArray = Vec<&'static Lint>;
|
|||
pub trait LintPass {
|
||||
fn name(&self) -> &'static str;
|
||||
|
||||
/// Get descriptions of the lints this `LintPass` object can emit.
|
||||
/// Gets descriptions of the lints this `LintPass` object can emit.
|
||||
///
|
||||
/// N.B., there is no enforcement that the object only emits lints it registered.
|
||||
/// And some `rustc` internal `LintPass`es register lints to be emitted by other
|
||||
|
@ -487,7 +487,7 @@ impl hash::Hash for LintId {
|
|||
}
|
||||
|
||||
impl LintId {
|
||||
/// Get the `LintId` for a `Lint`.
|
||||
/// Gets the `LintId` for a `Lint`.
|
||||
pub fn of(lint: &'static Lint) -> LintId {
|
||||
LintId {
|
||||
lint,
|
||||
|
@ -498,7 +498,7 @@ impl LintId {
|
|||
self.lint.name
|
||||
}
|
||||
|
||||
/// Get the name of the lint.
|
||||
/// Gets the name of the lint.
|
||||
pub fn to_string(&self) -> String {
|
||||
self.lint.name_lower()
|
||||
}
|
||||
|
@ -518,7 +518,7 @@ impl_stable_hash_for!(enum self::Level {
|
|||
});
|
||||
|
||||
impl Level {
|
||||
/// Convert a level to a lower-case string.
|
||||
/// Converts a level to a lower-case string.
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Allow => "allow",
|
||||
|
@ -528,7 +528,7 @@ impl Level {
|
|||
}
|
||||
}
|
||||
|
||||
/// Convert a lower-case string to a level.
|
||||
/// Converts a lower-case string to a level.
|
||||
pub fn from_str(x: &str) -> Option<Level> {
|
||||
match x {
|
||||
"allow" => Some(Allow),
|
||||
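As a usage-level sketch of the two conversions documented here, a simplified `Level` enum (not the one from `rustc::lint`) shows how `as_str` and `from_str` round-trip:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Level { Allow, Warn, Deny, Forbid }

impl Level {
    /// Converts a level to a lower-case string.
    fn as_str(self) -> &'static str {
        match self {
            Level::Allow => "allow",
            Level::Warn => "warn",
            Level::Deny => "deny",
            Level::Forbid => "forbid",
        }
    }

    /// Converts a lower-case string to a level.
    fn from_str(x: &str) -> Option<Level> {
        match x {
            "allow" => Some(Level::Allow),
            "warn" => Some(Level::Warn),
            "deny" => Some(Level::Deny),
            "forbid" => Some(Level::Forbid),
            _ => None,
        }
    }
}

fn main() {
    // The two functions round-trip; unknown strings map to `None`.
    assert_eq!(Level::from_str(Level::Deny.as_str()), Some(Level::Deny));
    assert_eq!(Level::from_str("nope"), None);
}
```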
|
|
|
@ -800,8 +800,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
|||
self.consume_expr(&arm.body);
|
||||
}
|
||||
|
||||
/// Walks a pat that occurs in isolation (i.e., top-level of fn
|
||||
/// arg or let binding. *Not* a match arm or nested pat.)
|
||||
/// Walks a pat that occurs in isolation (i.e., top-level of fn argument or
|
||||
/// let binding, and *not* a match arm or nested pat).
|
||||
fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
|
||||
let mut mode = Unknown;
|
||||
self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
//! This file handles the relationships between free regions --
|
||||
//! meaning lifetime parameters. Ordinarily, free regions are
|
||||
//! unrelated to one another, but they can be related via implied or
|
||||
//! explicit bounds. In that case, we track the bounds using the
|
||||
//! `TransitiveRelation` type and use that to decide when one free
|
||||
//! region outlives another and so forth.
|
||||
//! This module handles the relationships between "free regions", i.e., lifetime parameters.
|
||||
//! Ordinarily, free regions are unrelated to one another, but they can be related via implied
|
||||
//! or explicit bounds. In that case, we track the bounds using the `TransitiveRelation` type,
|
||||
//! and use that to decide when one free region outlives another, and so forth.
|
||||
|
||||
use crate::infer::outlives::free_region_map::{FreeRegionMap, FreeRegionRelations};
|
||||
use crate::hir::def_id::DefId;
|
||||
|
@ -16,17 +14,17 @@ use crate::ty::{self, TyCtxt, Region};
|
|||
/// regions.
|
||||
///
|
||||
/// This stuff is a bit convoluted and should be refactored, but as we
|
||||
/// move to NLL it'll all go away anyhow.
|
||||
/// transition to NLL, it'll all go away anyhow.
|
||||
pub struct RegionRelations<'a, 'gcx: 'tcx, 'tcx: 'a> {
|
||||
pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
|
||||
/// context used to fetch the region maps
|
||||
/// The context used to fetch the region maps.
|
||||
pub context: DefId,
|
||||
|
||||
/// region maps for the given context
|
||||
/// The region maps for the given context.
|
||||
pub region_scope_tree: &'a region::ScopeTree,
|
||||
|
||||
/// free-region relationships
|
||||
/// Free-region relationships.
|
||||
pub free_regions: &'a FreeRegionMap<'tcx>,
|
||||
}
|
||||
|
||||
|
@ -86,7 +84,7 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> {
|
|||
result
|
||||
}
|
||||
|
||||
/// Determines whether this free-region is required to be 'static
|
||||
/// Determines whether this free region is required to be `'static`.
|
||||
fn is_static(&self, super_region: ty::Region<'tcx>) -> bool {
|
||||
debug!("is_static(super_region={:?})", super_region);
|
||||
match *super_region {
|
||||
|
|
|
@ -1,27 +1,27 @@
|
|||
//! A classic liveness analysis based on dataflow over the AST. Computes,
|
||||
//! for each local variable in a function, whether that variable is live
|
||||
//! at a given point. Program execution points are identified by their
|
||||
//! id.
|
||||
//! IDs.
|
||||
//!
|
||||
//! # Basic idea
|
||||
//!
|
||||
//! The basic model is that each local variable is assigned an index. We
|
||||
//! represent sets of local variables using a vector indexed by this
|
||||
//! index. The value in the vector is either 0, indicating the variable
|
||||
//! is dead, or the id of an expression that uses the variable.
|
||||
//! is dead, or the ID of an expression that uses the variable.
|
||||
//!
|
||||
//! We conceptually walk over the AST in reverse execution order. If we
|
||||
//! find a use of a variable, we add it to the set of live variables. If
|
||||
//! we find an assignment to a variable, we remove it from the set of live
|
||||
//! variables. When we have to merge two flows, we take the union of
|
||||
//! those two flows---if the variable is live on both paths, we simply
|
||||
//! pick one id. In the event of loops, we continue doing this until a
|
||||
//! those two flows -- if the variable is live on both paths, we simply
|
||||
//! pick one ID. In the event of loops, we continue doing this until a
|
||||
//! fixed point is reached.
|
||||
//!
|
||||
//! ## Checking initialization
|
||||
//!
|
||||
//! At the function entry point, all variables must be dead. If this is
|
||||
//! not the case, we can report an error using the id found in the set of
|
||||
//! not the case, we can report an error using the ID found in the set of
|
||||
//! live variables, which identifies a use of the variable which is not
|
||||
//! dominated by an assignment.
|
||||
//!
|
||||
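The "index per variable, walk backwards, record the ID of the last use" model in these module docs can be sketched in a few lines of plain Rust. The `Stmt` and `Liveness` types below are invented for illustration and ignore branching and loops (which the real analysis handles by merging flows and iterating to a fixed point):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Liveness {
    Dead,
    LiveAt(usize), // ID of a later statement that uses the variable
}

enum Stmt {
    Assign { var: usize }, // kills the variable
    Use { var: usize },    // makes the variable live
}

fn analyze(stmts: &[Stmt], num_vars: usize) -> Vec<Liveness> {
    // One slot per variable, all dead at the end of the body.
    let mut live = vec![Liveness::Dead; num_vars];
    // Walk in reverse execution order.
    for (id, stmt) in stmts.iter().enumerate().rev() {
        match stmt {
            Stmt::Use { var } => live[*var] = Liveness::LiveAt(id),
            Stmt::Assign { var } => live[*var] = Liveness::Dead,
        }
    }
    // At function entry every variable must be dead; a `LiveAt(id)` here is a
    // use that is not dominated by an assignment.
    live
}

fn main() {
    let stmts = [Stmt::Use { var: 0 }, Stmt::Assign { var: 0 }, Stmt::Use { var: 0 }];
    println!("{:?}", analyze(&stmts, 1)); // [LiveAt(0)]: variable 0 used before any assignment
}
```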
|
@ -40,9 +40,9 @@
|
|||
//! The outer walk has the job of building up the ir_maps instance for the
|
||||
//! enclosing function. On the way down the tree, it identifies those AST
|
||||
//! nodes and variable IDs that will be needed for the liveness analysis
|
||||
//! and assigns them contiguous IDs. The liveness id for an AST node is
|
||||
//! called a `live_node` (it's a newtype'd u32) and the id for a variable
|
||||
//! is called a `variable` (another newtype'd u32).
|
||||
//! and assigns them contiguous IDs. The liveness ID for an AST node is
|
||||
//! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
|
||||
//! is called a `variable` (another newtype'd `u32`).
|
||||
//!
|
||||
//! On the way back up the tree, as we are about to exit from a function
|
||||
//! declaration we allocate a `liveness` instance. Now that we know
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
//! The job of the categorization module is to analyze an expression to
|
||||
//! determine what kind of memory is used in evaluating it (for example,
|
||||
//! where dereferences occur and what kind of pointer is dereferenced;
|
||||
//! whether the memory is mutable; etc)
|
||||
//! whether the memory is mutable, etc.).
|
||||
//!
|
||||
//! Categorization effectively transforms all of our expressions into
|
||||
//! expressions of the following forms (the actual enum has many more
|
||||
|
@ -17,17 +17,17 @@
|
|||
//!
|
||||
//! Imagine a routine ToAddr(Expr) that evaluates an expression and returns an
|
||||
//! address where the result is to be found. If Expr is a place, then this
|
||||
//! is the address of the place. If Expr is an rvalue, this is the address of
|
||||
//! is the address of the place. If `Expr` is an rvalue, this is the address of
|
||||
//! some temporary spot in memory where the result is stored.
|
||||
//!
|
||||
//! Now, cat_expr() classifies the expression Expr and the address A=ToAddr(Expr)
|
||||
//! Now, `cat_expr()` classifies the expression `Expr` and the address `A = ToAddr(Expr)`
|
||||
//! as follows:
|
||||
//!
|
||||
//! - cat: what kind of expression was this? This is a subset of the
|
||||
//! - `cat`: what kind of expression was this? This is a subset of the
|
||||
//! full expression forms which only includes those that we care about
|
||||
//! for the purpose of the analysis.
|
||||
//! - mutbl: mutability of the address A
|
||||
//! - ty: the type of data found at the address A
|
||||
//! - `mutbl`: mutability of the address `A`.
|
||||
//! - `ty`: the type of data found at the address `A`.
|
||||
//!
|
||||
//! The resulting categorization tree differs somewhat from the expressions
|
||||
//! themselves. For example, auto-derefs are explicit. Also, an index a[b] is
|
||||
|
|
|
@ -85,11 +85,11 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
|||
/// values live long enough; phrased another way, the starting point
|
||||
/// of each range is not really the important thing in the above
|
||||
/// picture, but rather the ending point.
|
||||
///
|
||||
/// FIXME (pnkfelix): This currently derives `PartialOrd` and `Ord` to
|
||||
/// placate the same deriving in `ty::FreeRegion`, but we may want to
|
||||
/// actually attach a more meaningful ordering to scopes than the one
|
||||
/// generated via deriving here.
|
||||
//
|
||||
// FIXME(pnkfelix): this currently derives `PartialOrd` and `Ord` to
|
||||
// placate the same deriving in `ty::FreeRegion`, but we may want to
|
||||
// actually attach a more meaningful ordering to scopes than the one
|
||||
// generated via deriving here.
|
||||
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy, RustcEncodable, RustcDecodable)]
|
||||
pub struct Scope {
|
||||
pub id: hir::ItemLocalId,
|
||||
|
@ -140,14 +140,14 @@ pub enum ScopeData {
|
|||
///
|
||||
/// For example, given `{ let (a, b) = EXPR_1; let c = EXPR_2; ... }`:
|
||||
///
|
||||
/// * the subscope with `first_statement_index == 0` is scope of both
|
||||
/// * The subscope with `first_statement_index == 0` is scope of both
|
||||
/// `a` and `b`; it does not include EXPR_1, but does include
|
||||
/// everything after that first `let`. (If you want a scope that
|
||||
/// includes EXPR_1 as well, then do not use `Scope::Remainder`,
|
||||
/// but instead another `Scope` that encompasses the whole block,
|
||||
/// e.g., `Scope::Node`.
|
||||
///
|
||||
/// * the subscope with `first_statement_index == 1` is scope of `c`,
|
||||
/// * The subscope with `first_statement_index == 1` is scope of `c`,
|
||||
/// and thus does not include EXPR_2, but covers the `...`.
|
||||
|
||||
newtype_index! {
|
||||
|
@ -160,7 +160,7 @@ impl_stable_hash_for!(struct crate::middle::region::FirstStatementIndex { privat
|
|||
static_assert!(ASSERT_SCOPE_DATA: mem::size_of::<ScopeData>() == 4);
|
||||
|
||||
impl Scope {
|
||||
/// Returns a item-local id associated with this scope.
|
||||
/// Returns an item-local ID associated with this scope.
|
||||
///
|
||||
/// N.B., likely to be replaced as API is refined; e.g., pnkfelix
|
||||
/// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
|
||||
|
@ -180,8 +180,8 @@ impl Scope {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns the span of this Scope. Note that in general the
|
||||
/// returned span may not correspond to the span of any node id in
|
||||
/// Returns the span of this `Scope`. Note that in general the
|
||||
/// returned span may not correspond to the span of any `NodeId` in
|
||||
/// the AST.
|
||||
pub fn span(&self, tcx: TyCtxt<'_, '_, '_>, scope_tree: &ScopeTree) -> Span {
|
||||
let node_id = self.node_id(tcx, scope_tree);
|
||||
|
@ -225,19 +225,19 @@ pub struct ScopeTree {
|
|||
/// have lifetime parameters free in this body.
|
||||
root_parent: Option<ast::NodeId>,
|
||||
|
||||
/// `parent_map` maps from a scope id to the enclosing scope id;
|
||||
/// `parent_map` maps from a scope ID to the enclosing scope ID;
|
||||
/// this is usually corresponding to the lexical nesting, though
|
||||
/// in the case of closures the parent scope is the innermost
|
||||
/// conditional expression or repeating block. (Note that the
|
||||
/// enclosing scope id for the block associated with a closure is
|
||||
/// enclosing scope ID for the block associated with a closure is
|
||||
/// the closure itself.)
|
||||
parent_map: FxHashMap<Scope, (Scope, ScopeDepth)>,
|
||||
|
||||
/// `var_map` maps from a variable or binding id to the block in
|
||||
/// `var_map` maps from a variable or binding ID to the block in
|
||||
/// which that variable is declared.
|
||||
var_map: FxHashMap<hir::ItemLocalId, Scope>,
|
||||
|
||||
/// maps from a node-id to the associated destruction scope (if any)
|
||||
/// Maps from a `NodeId` to the associated destruction scope (if any).
|
||||
destruction_scopes: FxHashMap<hir::ItemLocalId, Scope>,
|
||||
|
||||
/// `rvalue_scopes` includes entries for those expressions whose cleanup scope is
|
||||
|
@ -252,8 +252,8 @@ pub struct ScopeTree {
|
|||
|
||||
/// Encodes the hierarchy of fn bodies. Every fn body (including
|
||||
/// closures) forms its own distinct region hierarchy, rooted in
|
||||
/// the block that is the fn body. This map points from the id of
|
||||
/// that root block to the id of the root block for the enclosing
|
||||
/// the block that is the fn body. This map points from the ID of
|
||||
/// that root block to the ID of the root block for the enclosing
|
||||
/// fn, if any. Thus the map structures the fn bodies into a
|
||||
/// hierarchy based on their lexical mapping. This is used to
|
||||
/// handle the relationships between regions in a fn and in a
|
||||
|
@ -382,7 +382,7 @@ struct RegionResolutionVisitor<'a, 'tcx: 'a> {
|
|||
/// upon exiting the parent scope, we cannot statically know how
|
||||
/// many times the expression executed, and thus if the expression
|
||||
/// creates temporaries we cannot know statically how many such
|
||||
/// temporaries we would have to cleanup. Therefore we ensure that
|
||||
/// temporaries we would have to cleanup. Therefore, we ensure that
|
||||
/// the temporaries never outlast the conditional/repeating
|
||||
/// expression, preventing the need for dynamic checks and/or
|
||||
/// arbitrary amounts of stack space. Terminating scopes end
|
||||
|
@ -465,7 +465,7 @@ impl<'tcx> ScopeTree {
|
|||
}
|
||||
|
||||
/// Records that `sub_closure` is defined within `sup_closure`. These ids
|
||||
/// should be the id of the block that is the fn body, which is
|
||||
/// should be the ID of the block that is the fn body, which is
|
||||
/// also the root of the region hierarchy for that fn.
|
||||
fn record_closure_parent(&mut self,
|
||||
sub_closure: hir::ItemLocalId,
|
||||
|
@ -551,8 +551,8 @@ impl<'tcx> ScopeTree {
|
|||
self.is_subscope_of(scope2, scope1)
|
||||
}
|
||||
|
||||
/// Returns true if `subscope` is equal to or is lexically nested inside `superscope` and false
|
||||
/// otherwise.
|
||||
/// Returns `true` if `subscope` is equal to or is lexically nested inside `superscope`, and
|
||||
/// `false` otherwise.
|
||||
pub fn is_subscope_of(&self,
|
||||
subscope: Scope,
|
||||
superscope: Scope)
|
||||
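A minimal sketch of the parent-chain walk this predicate performs, assuming a flat parent map keyed by scope (the real `ScopeTree` stores `(Scope, ScopeDepth)` pairs and richer scope data):

```rust
use std::collections::HashMap;

type Scope = u32;

fn is_subscope_of(parent_map: &HashMap<Scope, Scope>, mut sub: Scope, sup: Scope) -> bool {
    // Climb from `sub` towards the root until we either hit `sup` or run out
    // of parents.
    while sub != sup {
        match parent_map.get(&sub) {
            Some(&parent) => sub = parent,
            None => return false, // reached the root without meeting `sup`
        }
    }
    true
}

fn main() {
    let parents: HashMap<Scope, Scope> = vec![(2, 1), (1, 0)].into_iter().collect();
    assert!(is_subscope_of(&parents, 2, 0)); // 2 -> 1 -> 0
    assert!(is_subscope_of(&parents, 0, 0)); // equal scopes count
    assert!(!is_subscope_of(&parents, 0, 2));
}
```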
|
@ -575,7 +575,7 @@ impl<'tcx> ScopeTree {
|
|||
return true;
|
||||
}
|
||||
|
||||
/// Returns the id of the innermost containing body
|
||||
/// Returns the ID of the innermost containing body.
|
||||
pub fn containing_body(&self, mut scope: Scope) -> Option<hir::ItemLocalId> {
|
||||
loop {
|
||||
if let ScopeData::CallSite = scope.data {
|
||||
|
@ -1051,7 +1051,7 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
|
|||
visitor.visit_pat(pat);
|
||||
}
|
||||
|
||||
/// True if `pat` match the `P&` nonterminal:
|
||||
/// Returns `true` if `pat` matches the `P&` non-terminal.
|
||||
///
|
||||
/// P& = ref X
|
||||
/// | StructName { ..., P&, ... }
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
//! Name resolution for lifetimes follows MUCH simpler rules than the
|
||||
//! full resolve. For example, lifetime names are never exported or
|
||||
//! used between functions, and they operate in a purely top-down
|
||||
//! way. Therefore we break lifetime name resolution into a separate pass.
|
||||
//! way. Therefore, we break lifetime name resolution into a separate pass.
|
||||
|
||||
use crate::hir::def::Def;
|
||||
use crate::hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
|
||||
|
@ -207,7 +207,7 @@ struct NamedRegionMap {
|
|||
pub object_lifetime_defaults: NodeMap<Vec<ObjectLifetimeDefault>>,
|
||||
}
|
||||
|
||||
/// See `NamedRegionMap`.
|
||||
/// See [`NamedRegionMap`].
|
||||
#[derive(Default)]
|
||||
pub struct ResolveLifetimes {
|
||||
defs: FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Region>>>,
|
||||
|
@ -227,21 +227,19 @@ struct LifetimeContext<'a, 'tcx: 'a> {
|
|||
map: &'a mut NamedRegionMap,
|
||||
scope: ScopeRef<'a>,
|
||||
|
||||
/// Deep breath. Our representation for poly trait refs contains a single
|
||||
/// This is slightly complicated. Our representation for poly-trait-refs contains a single
|
||||
/// binder and thus we only allow a single level of quantification. However,
|
||||
/// the syntax of Rust permits quantification in two places, e.g., `T: for <'a> Foo<'a>`
|
||||
/// and `for <'a, 'b> &'b T: Foo<'a>`. In order to get the de Bruijn indices
|
||||
/// and `for <'a, 'b> &'b T: Foo<'a>`. In order to get the De Bruijn indices
|
||||
/// correct when representing these constraints, we should only introduce one
|
||||
/// scope. However, we want to support both locations for the quantifier and
|
||||
/// during lifetime resolution we want precise information (so we can't
|
||||
/// desugar in an earlier phase).
|
||||
///
|
||||
/// SO, if we encounter a quantifier at the outer scope, we set
|
||||
/// trait_ref_hack to true (and introduce a scope), and then if we encounter
|
||||
/// a quantifier at the inner scope, we error. If trait_ref_hack is false,
|
||||
/// So, if we encounter a quantifier at the outer scope, we set
|
||||
/// `trait_ref_hack` to `true` (and introduce a scope), and then if we encounter
|
||||
/// a quantifier at the inner scope, we error. If `trait_ref_hack` is `false`,
|
||||
/// then we introduce the scope at the inner quantifier.
|
||||
///
|
||||
/// I'm sorry.
|
||||
trait_ref_hack: bool,
|
||||
|
||||
/// Used to disallow the use of in-band lifetimes in `fn` or `Fn` syntax.
|
||||
|
@ -1676,7 +1674,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
|
|||
/// If early bound lifetimes are present, we separate them into their own list (and likewise
|
||||
/// for late bound). They will be numbered sequentially, starting from the lowest index that is
|
||||
/// already in scope (for a fn item, that will be 0, but for a method it might not be). Late
|
||||
/// bound lifetimes are resolved by name and associated with a binder id (`binder_id`), so the
|
||||
/// bound lifetimes are resolved by name and associated with a binder ID (`binder_id`), so the
|
||||
/// ordering is not important there.
|
||||
fn visit_early_late<F>(
|
||||
&mut self,
|
||||
|
@ -2610,7 +2608,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns true if, in the current scope, replacing `'_` would be
|
||||
/// Returns `true` if, in the current scope, replacing `'_` would be
|
||||
/// equivalent to a single-use lifetime.
|
||||
fn track_lifetime_uses(&self) -> bool {
|
||||
let mut scope = self.scope;
|
||||
|
|
|
@ -51,7 +51,7 @@ enum AnnotationKind {
|
|||
pub struct DeprecationEntry {
|
||||
/// The metadata of the attribute associated with this entry.
|
||||
pub attr: Deprecation,
|
||||
/// The def id where the attr was originally attached. `None` for non-local
|
||||
/// The `DefId` where the attr was originally attached. `None` for non-local
|
||||
/// `DefId`'s.
|
||||
origin: Option<HirId>,
|
||||
}
|
||||
|
@ -475,7 +475,7 @@ pub fn provide(providers: &mut Providers<'_>) {
|
|||
};
|
||||
}
|
||||
|
||||
/// Check whether an item marked with `deprecated(since="X")` is currently
|
||||
/// Checks whether an item marked with `deprecated(since="X")` is currently
|
||||
/// deprecated (i.e., whether X is not greater than the current rustc version).
|
||||
pub fn deprecation_in_effect(since: &str) -> bool {
|
||||
fn parse_version(ver: &str) -> Vec<u32> {
|
||||
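Roughly, such a check parses both version strings into numeric components and compares them componentwise. The sketch below assumes the current version is passed in explicitly, whereas the real function consults the rustc version it was built with:

```rust
fn parse_version(ver: &str) -> Vec<u32> {
    ver.split('.').filter_map(|s| s.parse().ok()).collect()
}

/// `since` is "in effect" once the current version has caught up to it.
fn deprecation_in_effect(since: &str, current: &str) -> bool {
    // Vec<u32> compares lexicographically, i.e., major, then minor, then patch.
    parse_version(since) <= parse_version(current)
}

fn main() {
    assert!(deprecation_in_effect("1.30.0", "1.32.0"));
    assert!(!deprecation_in_effect("1.40.0", "1.32.0"));
}
```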
|
|
|
@ -54,7 +54,7 @@ pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol> {
|
|||
})
|
||||
}
|
||||
|
||||
/// Returns whether the specified `lang_item` doesn't actually need to be
|
||||
/// Returns `true` if the specified `lang_item` doesn't actually need to be
|
||||
/// present for this compilation.
|
||||
///
|
||||
/// Not all lang items are always required for each compilation, particularly in
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
//! The virtual memory representation of the MIR interpreter
|
||||
//! The virtual memory representation of the MIR interpreter.
|
||||
|
||||
use super::{
|
||||
Pointer, EvalResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar,
|
||||
|
@ -133,7 +133,7 @@ impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
|
|||
|
||||
/// Alignment and bounds checks
|
||||
impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
|
||||
/// Check if the pointer is "in-bounds". Notice that a pointer pointing at the end
|
||||
/// Checks if the pointer is "in-bounds". Notice that a pointer pointing at the end
|
||||
/// of an allocation (i.e., at the first *inaccessible* location) *is* considered
|
||||
/// in-bounds! This follows C's/LLVM's rules.
|
||||
/// If you want to check bounds before doing a memory access, better use `check_bounds`.
|
||||
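The end-inclusive rule called out above boils down to comparing with `<=` rather than `<`. A tiny illustration (a free function with assumed arguments, not the real method):

```rust
/// An offset equal to the allocation size is still "in-bounds"
/// (one-past-the-end), mirroring C's/LLVM's pointer rules.
fn ptr_in_bounds(offset: u64, allocation_size: u64) -> bool {
    offset <= allocation_size // note `<=`, not `<`
}

fn main() {
    assert!(ptr_in_bounds(8, 8));  // one past the end: allowed
    assert!(!ptr_in_bounds(9, 8)); // past the end: out of bounds
}
```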
|
@ -145,7 +145,7 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
|
|||
ptr.check_in_alloc(Size::from_bytes(allocation_size), InboundsCheck::Live)
|
||||
}
|
||||
|
||||
/// Check if the memory range beginning at `ptr` and of size `Size` is "in-bounds".
|
||||
/// Checks if the memory range beginning at `ptr` and of size `Size` is "in-bounds".
|
||||
#[inline(always)]
|
||||
pub fn check_bounds(
|
||||
&self,
|
||||
|
@ -462,7 +462,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
|
||||
/// Relocations
|
||||
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
/// Return all relocations overlapping with the given ptr-offset pair.
|
||||
/// Returns all relocations overlapping with the given ptr-offset pair.
|
||||
pub fn relocations(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
@ -476,7 +476,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
self.relocations.range(Size::from_bytes(start)..end)
|
||||
}
|
||||
|
||||
/// Check that there are no relocations overlapping with the given range.
|
||||
/// Checks that there are no relocations overlapping with the given range.
|
||||
#[inline(always)]
|
||||
fn check_relocations(
|
||||
&self,
|
||||
|
@ -491,7 +491,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Remove all relocations inside the given range.
|
||||
/// Removes all relocations inside the given range.
|
||||
/// If there are relocations overlapping with the edges, they
|
||||
/// are removed as well *and* the bytes they cover are marked as
|
||||
/// uninitialized. This is a somewhat odd "spooky action at a distance",
|
||||
|
@ -633,7 +633,7 @@ impl UndefMask {
|
|||
m
|
||||
}
|
||||
|
||||
/// Check whether the range `start..end` (end-exclusive) is entirely defined.
|
||||
/// Checks whether the range `start..end` (end-exclusive) is entirely defined.
|
||||
///
|
||||
/// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
|
||||
/// at which the first undefined access begins.
|
||||
|
|
|
@ -19,7 +19,7 @@ use syntax::symbol::Symbol;
|
|||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub enum ErrorHandled {
|
||||
/// Already reported a lint or an error for this evaluation
|
||||
/// Already reported a lint or an error for this evaluation.
|
||||
Reported,
|
||||
/// Don't emit an error, the evaluation failed because the MIR was generic
|
||||
/// and the substs didn't fully monomorphize it.
|
||||
|
@ -212,7 +212,7 @@ pub type AssertMessage<'tcx> = EvalErrorKind<'tcx, mir::Operand<'tcx>>;
|
|||
#[derive(Clone, RustcEncodable, RustcDecodable)]
|
||||
pub enum EvalErrorKind<'tcx, O> {
|
||||
/// This variant is used by machines to signal their own errors that do not
|
||||
/// match an existing variant
|
||||
/// match an existing variant.
|
||||
MachineError(String),
|
||||
|
||||
FunctionAbiMismatch(Abi, Abi),
|
||||
|
|
|
@ -260,23 +260,23 @@ impl fmt::Display for AllocId {
|
|||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable)]
|
||||
pub enum AllocKind<'tcx> {
|
||||
/// The alloc id is used as a function pointer
|
||||
/// The alloc ID is used as a function pointer
|
||||
Function(Instance<'tcx>),
|
||||
/// The alloc id points to a "lazy" static variable that did not get computed (yet).
|
||||
/// The alloc ID points to a "lazy" static variable that did not get computed (yet).
|
||||
/// This is also used to break the cycle in recursive statics.
|
||||
Static(DefId),
|
||||
/// The alloc id points to memory
|
||||
/// The alloc ID points to memory.
|
||||
Memory(&'tcx Allocation),
|
||||
}
|
||||
|
||||
pub struct AllocMap<'tcx> {
|
||||
/// Lets you know what an AllocId refers to
|
||||
/// Lets you know what an `AllocId` refers to.
|
||||
id_to_kind: FxHashMap<AllocId, AllocKind<'tcx>>,
|
||||
|
||||
/// Used to ensure that statics only get one associated AllocId
|
||||
/// Used to ensure that statics only get one associated `AllocId`.
|
||||
type_interner: FxHashMap<AllocKind<'tcx>, AllocId>,
|
||||
|
||||
/// The AllocId to assign to the next requested id.
|
||||
/// The `AllocId` to assign to the next requested ID.
|
||||
/// Always incremented, never gets smaller.
|
||||
next_id: AllocId,
|
||||
}
|
||||
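The write-once interning pattern these fields implement can be sketched with ordinary hash maps. The types below are drastically simplified stand-ins (`AllocId` as a bare `u64`, only the `Static` kind), not rustc's actual `AllocMap`:

```rust
use std::collections::HashMap;

type AllocId = u64;
type DefId = u32;

#[derive(Clone, PartialEq, Eq, Hash)]
enum AllocKind {
    Static(DefId),
}

struct AllocMap {
    id_to_kind: HashMap<AllocId, AllocKind>,
    type_interner: HashMap<AllocKind, AllocId>,
    next_id: AllocId, // only ever incremented
}

impl AllocMap {
    fn intern_static(&mut self, static_id: DefId) -> AllocId {
        let kind = AllocKind::Static(static_id);
        if let Some(&id) = self.type_interner.get(&kind) {
            return id; // the same static always maps to the same AllocId
        }
        let id = self.next_id;
        self.next_id += 1;
        self.type_interner.insert(kind.clone(), id);
        self.id_to_kind.insert(id, kind);
        id
    }
}

fn main() {
    let mut map = AllocMap {
        id_to_kind: HashMap::new(),
        type_interner: HashMap::new(),
        next_id: 0,
    };
    let a = map.intern_static(42);
    let b = map.intern_static(42);
    assert_eq!(a, b); // cached on the second call
}
```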
|
@ -345,7 +345,7 @@ impl<'tcx> AllocMap<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Generate an `AllocId` for a static or return a cached one in case this function has been
|
||||
/// Generates an `AllocId` for a static or returns a cached one in case this function has been
|
||||
/// called on the same static before.
|
||||
pub fn intern_static(&mut self, static_id: DefId) -> AllocId {
|
||||
self.intern(AllocKind::Static(static_id))
|
||||
|
|
|
@ -13,16 +13,17 @@ pub struct RawConst<'tcx> {
|
|||
pub ty: Ty<'tcx>,
|
||||
}
|
||||
|
||||
/// Represents a constant value in Rust. Scalar and ScalarPair are optimizations which
|
||||
/// matches the LocalState optimizations for easy conversions between Value and ConstValue.
|
||||
/// Represents a constant value in Rust. `Scalar` and `ScalarPair` are optimizations that
|
||||
/// match the `LocalState` optimizations for easy conversions between `Value` and `ConstValue`.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
|
||||
pub enum ConstValue<'tcx> {
|
||||
/// Used only for types with layout::abi::Scalar ABI and ZSTs
|
||||
/// Used only for types with `layout::abi::Scalar` ABI and ZSTs.
|
||||
///
|
||||
/// Not using the enum `Value` to encode that this must not be `Undef`
|
||||
/// Not using the enum `Value` to encode that this must not be `Undef`.
|
||||
Scalar(Scalar),
|
||||
|
||||
/// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`, ...)
|
||||
/// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`,
|
||||
/// etc.).
|
||||
///
|
||||
/// Empty slices don't necessarily have an address backed by an `AllocId`, thus we also need to
|
||||
/// enable integer pointers. The `Scalar` type covers exactly those two cases. While we could
|
||||
|
@ -30,8 +31,8 @@ pub enum ConstValue<'tcx> {
|
|||
/// it.
|
||||
Slice(Scalar, u64),
|
||||
|
||||
/// An allocation + offset into the allocation.
|
||||
/// Invariant: The AllocId matches the allocation.
|
||||
/// An allocation together with an offset into the allocation.
|
||||
/// Invariant: the `AllocId` matches the allocation.
|
||||
ByRef(AllocId, &'tcx Allocation, Size),
|
||||
}
|
||||
|
||||
|
|
|
@ -108,7 +108,7 @@ pub struct Mir<'tcx> {
|
|||
/// in scope, but a separate set of locals.
|
||||
pub promoted: IndexVec<Promoted, Mir<'tcx>>,
|
||||
|
||||
/// Yield type of the function, if it is a generator.
|
||||
/// The yield type of the function, if it is a generator.
|
||||
pub yield_ty: Option<Ty<'tcx>>,
|
||||
|
||||
/// Generator drop glue
|
||||
|
@ -380,7 +380,7 @@ impl<'tcx> Mir<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Check if `sub` is a sub scope of `sup`
|
||||
/// Checks if `sub` is a subscope of `sup`.
|
||||
pub fn is_sub_scope(&self, mut sub: SourceScope, sup: SourceScope) -> bool {
|
||||
while sub != sup {
|
||||
match self.source_scopes[sub].parent_scope {
|
||||
|
@ -391,12 +391,12 @@ impl<'tcx> Mir<'tcx> {
|
|||
true
|
||||
}
|
||||
|
||||
/// Return the return type, it always return first element from `local_decls` array
|
||||
/// Returns the return type; it is always the first element of the `local_decls` array.
|
||||
pub fn return_ty(&self) -> Ty<'tcx> {
|
||||
self.local_decls[RETURN_PLACE].ty
|
||||
}
|
||||
|
||||
/// Get the location of the terminator for the given block
|
||||
/// Gets the location of the terminator for the given block.
|
||||
pub fn terminator_loc(&self, bb: BasicBlock) -> Location {
|
||||
Location {
|
||||
block: bb,
|
||||
|
@ -565,8 +565,8 @@ pub enum BorrowKind {
|
|||
|
||||
/// Data is mutable and not aliasable.
|
||||
Mut {
|
||||
/// True if this borrow arose from method-call auto-ref
|
||||
/// (i.e., `adjustment::Adjust::Borrow`)
|
||||
/// `true` if this borrow arose from method-call auto-ref
|
||||
/// (i.e., `adjustment::Adjust::Borrow`).
|
||||
allow_two_phase_borrow: bool,
|
||||
},
|
||||
}
|
||||
|
@ -610,7 +610,7 @@ pub struct VarBindingForm<'tcx> {
|
|||
/// If an explicit type was provided for this variable binding,
|
||||
/// this holds the source Span of that type.
|
||||
///
|
||||
/// NOTE: If you want to change this to a `HirId`, be wary that
|
||||
/// NOTE: if you want to change this to a `HirId`, be wary that
|
||||
/// doing so breaks incremental compilation (as of this writing),
|
||||
/// while a `Span` does not cause our tests to fail.
|
||||
pub opt_ty_info: Option<Span>,
|
||||
|
@ -737,7 +737,7 @@ pub struct LocalDecl<'tcx> {
|
|||
/// `ClearCrossCrate` as long as it carries a `HirId`.
|
||||
pub is_user_variable: Option<ClearCrossCrate<BindingForm<'tcx>>>,
|
||||
|
||||
/// True if this is an internal local
|
||||
/// `true` if this is an internal local.
|
||||
///
|
||||
/// These locals are not based on types in the source code and are only used
|
||||
/// for a few desugarings at the moment.
|
||||
|
@ -864,7 +864,7 @@ pub struct LocalDecl<'tcx> {
|
|||
}
|
||||
|
||||
impl<'tcx> LocalDecl<'tcx> {
|
||||
/// Returns true only if local is a binding that can itself be
|
||||
/// Returns `true` only if local is a binding that can itself be
|
||||
/// made mutable via the addition of the `mut` keyword, namely
|
||||
/// something like the occurrences of `x` in:
|
||||
/// - `fn foo(x: Type) { ... }`,
|
||||
|
@ -886,7 +886,7 @@ impl<'tcx> LocalDecl<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns true if local is definitely not a `ref ident` or
|
||||
/// Returns `true` if local is definitely not a `ref ident` or
|
||||
/// `ref mut ident` binding. (Such bindings cannot be made into
|
||||
/// mutable bindings, but the inverse does not necessarily hold).
|
||||
pub fn is_nonref_binding(&self) -> bool {
|
||||
|
@ -904,7 +904,7 @@ impl<'tcx> LocalDecl<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Create a new `LocalDecl` for a temporary.
|
||||
/// Creates a new `LocalDecl` for a temporary.
|
||||
#[inline]
|
||||
pub fn new_temp(ty: Ty<'tcx>, span: Span) -> Self {
|
||||
Self::new_local(ty, Mutability::Mut, false, span)
|
||||
|
@ -925,7 +925,7 @@ impl<'tcx> LocalDecl<'tcx> {
|
|||
self
|
||||
}
|
||||
|
||||
/// Create a new `LocalDecl` for a internal temporary.
|
||||
/// Creates a new `LocalDecl` for an internal temporary.
|
||||
#[inline]
|
||||
pub fn new_internal(ty: Ty<'tcx>, span: Span) -> Self {
|
||||
Self::new_local(ty, Mutability::Mut, true, span)
|
||||
|
@ -1019,7 +1019,7 @@ pub struct BasicBlockData<'tcx> {
|
|||
|
||||
/// Terminator for this block.
|
||||
///
|
||||
/// NB. This should generally ONLY be `None` during construction.
|
||||
/// N.B., this should generally ONLY be `None` during construction.
|
||||
/// Therefore, you should generally access it via the
|
||||
/// `terminator()` or `terminator_mut()` methods. The only
|
||||
/// exception is that certain passes, such as `simplify_cfg`, swap
|
||||
|
@ -1637,7 +1637,7 @@ impl<'tcx> TerminatorKind<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Return the list of labels for the edges to the successor basic blocks.
|
||||
/// Returns the list of labels for the edges to the successor basic blocks.
|
||||
pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
|
@ -1760,7 +1760,7 @@ pub enum StatementKind<'tcx> {
|
|||
/// error messages to these specific patterns.
|
||||
///
|
||||
/// Note that this also is emitted for regular `let` bindings to ensure that locals that are
|
||||
/// never accessed still get some sanity checks for e.g. `let x: ! = ..;`
|
||||
/// never accessed still get some sanity checks for, e.g., `let x: ! = ..;`
|
||||
FakeRead(FakeReadCause, Place<'tcx>),
|
||||
|
||||
/// Write the discriminant for a variant to the enum Place.
|
||||
|
@ -1775,7 +1775,7 @@ pub enum StatementKind<'tcx> {
|
|||
/// End the current live range for the storage of the local.
|
||||
StorageDead(Local),
|
||||
|
||||
/// Execute a piece of inline Assembly.
|
||||
/// Executes a piece of inline Assembly.
|
||||
InlineAsm {
|
||||
asm: Box<InlineAsm>,
|
||||
outputs: Box<[Place<'tcx>]>,
|
||||
|
@ -1904,7 +1904,7 @@ pub enum Place<'tcx> {
|
|||
Projection(Box<PlaceProjection<'tcx>>),
|
||||
}
|
||||
|
||||
/// The def-id of a static, along with its normalized type (which is
|
||||
/// The `DefId` of a static, along with its normalized type (which is
|
||||
/// stored to avoid requiring normalization when reading MIR).
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct Static<'tcx> {
|
||||
|
@ -2009,10 +2009,10 @@ impl<'tcx> Place<'tcx> {
|
|||
Place::Projection(Box::new(PlaceProjection { base: self, elem }))
|
||||
}
|
||||
|
||||
/// Find the innermost `Local` from this `Place`, *if* it is either a local itself or
|
||||
/// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or
|
||||
/// a single deref of a local.
|
||||
///
|
||||
/// FIXME: can we safely swap the semantics of `fn base_local` below in here instead?
|
||||
//
|
||||
// FIXME: can we safely swap the semantics of `fn base_local` below in here instead?
|
||||
pub fn local(&self) -> Option<Local> {
|
||||
match self {
|
||||
Place::Local(local) |
|
||||
|
@ -2024,7 +2024,7 @@ impl<'tcx> Place<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Find the innermost `Local` from this `Place`.
|
||||
/// Finds the innermost `Local` from this `Place`.
|
||||
pub fn base_local(&self) -> Option<Local> {
|
||||
match self {
|
||||
Place::Local(local) => Some(*local),
|
||||
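The difference between the two helpers is easiest to see on a toy `Place` with only locals and derefs (an assumption made purely for illustration; the real type has many more projection kinds):

```rust
type Local = u32;

enum Place {
    Local(Local),
    Deref(Box<Place>),
}

impl Place {
    /// Only a local itself or a single deref of a local.
    fn local(&self) -> Option<Local> {
        match self {
            Place::Local(l) => Some(*l),
            Place::Deref(base) => match **base {
                Place::Local(l) => Some(l),
                _ => None,
            },
        }
    }

    /// The innermost local, through any number of derefs.
    fn base_local(&self) -> Option<Local> {
        match self {
            Place::Local(l) => Some(*l),
            Place::Deref(base) => base.base_local(),
        }
    }
}

fn main() {
    let p = Place::Deref(Box::new(Place::Deref(Box::new(Place::Local(7)))));
    assert_eq!(p.local(), None);         // two derefs: not a "single deref of a local"
    assert_eq!(p.base_local(), Some(7)); // but the innermost local is still found
}
```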
|
@ -2141,7 +2141,7 @@ impl<'tcx> Debug for Operand<'tcx> {
|
|||
|
||||
impl<'tcx> Operand<'tcx> {
|
||||
/// Convenience helper to make a constant that refers to the fn
|
||||
/// with given def-id and substs. Since this is used to synthesize
|
||||
/// with the given `DefId` and substs. Since this is used to synthesize
|
||||
/// MIR, assumes `user_ty` is None.
|
||||
pub fn function_handle<'a>(
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
@ -2199,7 +2199,7 @@ pub enum Rvalue<'tcx> {
|
|||
/// be defined to return, say, a 0) if ADT is not an enum.
|
||||
Discriminant(Place<'tcx>),
|
||||
|
||||
/// Create an aggregate value, like a tuple or struct. This is
|
||||
/// Creates an aggregate value, like a tuple or struct. This is
|
||||
/// only needed because we want to distinguish `dest = Foo { x:
|
||||
/// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
|
||||
/// that `Foo` has a destructor. These rvalues can be optimized
|
||||
|
@ -2211,13 +2211,13 @@ pub enum Rvalue<'tcx> {
|
|||
pub enum CastKind {
|
||||
Misc,
|
||||
|
||||
/// Convert unique, zero-sized type for a fn to fn()
|
||||
/// Converts the unique, zero-sized type of a fn item to a `fn()` pointer.
|
||||
ReifyFnPointer,
|
||||
|
||||
/// Convert non capturing closure to fn()
|
||||
/// Converts a non-capturing closure to a `fn()` pointer.
|
||||
ClosureFnPointer,
|
||||
|
||||
/// Convert safe fn() to unsafe fn()
|
||||
/// Converts a safe `fn()` to an `unsafe fn()`.
|
||||
UnsafeFnPointer,
|
||||
|
||||
/// "Unsize" -- convert a thin-or-fat pointer to a fat pointer.
|
||||
|
@ -2301,9 +2301,9 @@ impl BinOp {
|
|||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
|
||||
pub enum NullOp {
|
||||
/// Return the size of a value of that type
|
||||
/// Returns the size of a value of that type
|
||||
SizeOf,
|
||||
/// Create a new uninitialized box for a value of that type
|
||||
/// Creates a new uninitialized box for a value of that type
|
||||
Box,
|
||||
}
|
||||
|
||||
|
@ -2847,7 +2847,7 @@ impl Location {
|
|||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub enum UnsafetyViolationKind {
|
||||
General,
|
||||
/// Permitted in const fn and regular fns
|
||||
/// Permitted in const fn and regular fns.
|
||||
GeneralAndConstFn,
|
||||
ExternStatic(ast::NodeId),
|
||||
BorrowPacked(ast::NodeId),
|
||||
|
|
|
@ -278,7 +278,7 @@ impl<'tcx> Rvalue<'tcx> {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
/// Returns whether this rvalue is deeply initialized (most rvalues) or
|
||||
/// Returns `true` if this rvalue is deeply initialized (most rvalues) or
|
||||
/// whether it's only shallowly initialized (`Rvalue::Box`).
|
||||
pub fn initialization_state(&self) -> RvalueInitializationState {
|
||||
match *self {
|
||||
|
|
|
@ -475,7 +475,7 @@ impl BorrowckMode {
|
|||
}
|
||||
|
||||
pub enum Input {
|
||||
/// Load source from file
|
||||
/// Loads the source from a file.
|
||||
File(PathBuf),
|
||||
Str {
|
||||
/// String that is shown in place of a filename
|
||||
|
@ -523,7 +523,7 @@ impl OutputFilenames {
|
|||
.unwrap_or_else(|| self.temp_path(flavor, None))
|
||||
}
|
||||
|
||||
/// Get the path where a compilation artifact of the given type for the
|
||||
/// Gets the path where a compilation artifact of the given type for the
|
||||
/// given codegen unit should be placed on disk. If codegen_unit_name is
|
||||
/// None, a path distinct from those of any codegen unit will be generated.
|
||||
pub fn temp_path(&self, flavor: OutputType, codegen_unit_name: Option<&str>) -> PathBuf {
|
||||
|
@ -532,7 +532,7 @@ impl OutputFilenames {
|
|||
}
|
||||
|
||||
/// Like temp_path, but also supports things where there is no corresponding
|
||||
/// OutputType, like no-opt-bitcode or lto-bitcode.
|
||||
/// OutputType, like noopt-bitcode or lto-bitcode.
|
||||
pub fn temp_path_ext(&self, ext: &str, codegen_unit_name: Option<&str>) -> PathBuf {
|
||||
let base = self.out_directory.join(&self.filestem());
|
||||
|
||||
|
@ -616,7 +616,7 @@ impl Default for Options {
|
|||
}
|
||||
|
||||
impl Options {
|
||||
/// True if there is a reason to build the dep graph.
|
||||
/// Returns `true` if there is a reason to build the dep graph.
|
||||
pub fn build_dep_graph(&self) -> bool {
|
||||
self.incremental.is_some() || self.debugging_opts.dump_dep_graph
|
||||
|| self.debugging_opts.query_dep_graph
|
||||
|
@ -632,7 +632,7 @@ impl Options {
|
|||
FilePathMapping::new(self.remap_path_prefix.clone())
|
||||
}
|
||||
|
||||
/// True if there will be an output file generated
|
||||
/// Returns `true` if an output file will be generated.
|
||||
pub fn will_create_output_file(&self) -> bool {
|
||||
!self.debugging_opts.parse_only && // The file is just being parsed
|
||||
!self.debugging_opts.ls // The file is just being queried
|
||||
|
|
|
@ -51,7 +51,7 @@ pub mod filesearch;
|
|||
pub mod search_paths;
|
||||
|
||||
pub struct OptimizationFuel {
|
||||
/// If -zfuel=crate=n is specified, initially set to n. Otherwise 0.
|
||||
/// If `-zfuel=crate=n` is specified, initially set to `n`, otherwise `0`.
|
||||
remaining: u64,
|
||||
/// We're rejecting all further optimizations.
|
||||
out_of_fuel: bool,
|
||||
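How these two fields interact can be sketched with an assumed `consume` method (the method name and the bare struct here are invented for illustration; the real bookkeeping is done through the compiler session and also handles `-Zprint-fuel`):

```rust
struct OptimizationFuel {
    remaining: u64,
    out_of_fuel: bool,
}

impl OptimizationFuel {
    /// Each optimization asks for one unit of fuel; once it runs out,
    /// every later request is refused.
    fn consume(&mut self) -> bool {
        if self.remaining > 0 {
            self.remaining -= 1;
            true
        } else {
            self.out_of_fuel = true;
            false
        }
    }
}

fn main() {
    let mut fuel = OptimizationFuel { remaining: 2, out_of_fuel: false };
    assert!(fuel.consume());
    assert!(fuel.consume());
    assert!(!fuel.consume()); // fuel exhausted: reject further optimizations
    assert!(fuel.out_of_fuel);
}
```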
|
@ -64,7 +64,7 @@ pub struct Session {
|
|||
pub host: Target,
|
||||
pub opts: config::Options,
|
||||
pub host_tlib_path: SearchPath,
|
||||
/// This is `None` if the host and target are the same.
|
||||
/// `None` if the host and target are the same.
|
||||
pub target_tlib_path: Option<SearchPath>,
|
||||
pub parse_sess: ParseSess,
|
||||
pub sysroot: PathBuf,
|
||||
|
@ -104,7 +104,7 @@ pub struct Session {
|
|||
/// The maximum length of types during monomorphization.
|
||||
pub type_length_limit: Once<usize>,
|
||||
|
||||
/// The maximum number of stackframes allowed in const eval
|
||||
/// The maximum number of stackframes allowed in const eval.
|
||||
pub const_eval_stack_frame_limit: usize,
|
||||
|
||||
/// The metadata::creader module may inject an allocator/panic_runtime
|
||||
|
@ -123,13 +123,13 @@ pub struct Session {
|
|||
/// `-Zquery-dep-graph` is specified.
|
||||
pub cgu_reuse_tracker: CguReuseTracker,
|
||||
|
||||
/// Used by -Z profile-queries in util::common
|
||||
/// Used by `-Z profile-queries` in `util::common`.
|
||||
pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
|
||||
|
||||
/// Used by -Z self-profile
|
||||
/// Used by `-Z self-profile`.
|
||||
pub self_profiling_active: bool,
|
||||
|
||||
/// Used by -Z self-profile
|
||||
/// Used by `-Z self-profile`.
|
||||
pub self_profiling: Lock<SelfProfiler>,
|
||||
|
||||
/// Some measurements that are being gathered during compilation.
|
||||
|
@ -140,14 +140,14 @@ pub struct Session {
|
|||
|
||||
next_node_id: OneThread<Cell<ast::NodeId>>,
|
||||
|
||||
/// If -zfuel=crate=n is specified, Some(crate).
|
||||
/// If `-zfuel=crate=n` is specified, `Some(crate)`.
|
||||
optimization_fuel_crate: Option<String>,
|
||||
|
||||
/// Tracks fuel info if If -zfuel=crate=n is specified
|
||||
/// Tracks fuel info if `-zfuel=crate=n` is specified.
|
||||
optimization_fuel: Lock<OptimizationFuel>,
|
||||
|
||||
// The next two are public because the driver needs to read them.
|
||||
/// If -zprint-fuel=crate, Some(crate).
|
||||
/// If `-zprint-fuel=crate`, `Some(crate)`.
|
||||
pub print_fuel_crate: Option<String>,
|
||||
/// Always set to zero and incremented so that we can print fuel expended by a crate.
|
||||
pub print_fuel: AtomicU64,
|
||||
|
@ -156,10 +156,10 @@ pub struct Session {
|
|||
/// false positives about a job server in our environment.
|
||||
pub jobserver: Client,
|
||||
|
||||
/// Metadata about the allocators for the current crate being compiled
|
||||
/// Metadata about the allocators for the current crate being compiled.
|
||||
pub has_global_allocator: Once<bool>,
|
||||
|
||||
/// Metadata about the panic handlers for the current crate being compiled
|
||||
/// Metadata about the panic handlers for the current crate being compiled.
|
||||
pub has_panic_handler: Once<bool>,
|
||||
|
||||
/// Cap lint level specified by a driver specifically.
|
||||
|
@ -167,9 +167,9 @@ pub struct Session {
|
|||
}
|
||||
|
||||
pub struct PerfStats {
|
||||
/// The accumulated time spent on computing symbol hashes
|
||||
/// The accumulated time spent on computing symbol hashes.
|
||||
pub symbol_hash_time: Lock<Duration>,
|
||||
/// The accumulated time spent decoding def path tables from metadata
|
||||
/// The accumulated time spent decoding def path tables from metadata.
|
||||
pub decode_def_path_tables_time: Lock<Duration>,
|
||||
/// Total number of values canonicalized queries constructed.
|
||||
pub queries_canonicalized: AtomicUsize,
|
||||
|
@ -539,7 +539,7 @@ impl Session {
|
|||
self.opts.debugging_opts.print_llvm_passes
|
||||
}
|
||||
|
||||
/// Get the features enabled for the current compilation session.
|
||||
/// Gets the features enabled for the current compilation session.
|
||||
/// DO NOT USE THIS METHOD if there is a TyCtxt available, as it circumvents
|
||||
/// dependency tracking. Use tcx.features() instead.
|
||||
#[inline]
|
||||
|
@ -989,7 +989,7 @@ impl Session {
|
|||
self.opts.edition
|
||||
}
|
||||
|
||||
/// True if we cannot skip the PLT for shared library calls.
|
||||
/// Returns `true` if we cannot skip the PLT for shared library calls.
|
||||
pub fn needs_plt(&self) -> bool {
|
||||
// Check if the current target usually needs PLT to be enabled.
|
||||
// The user can use the command line flag to override it.
|
||||
|
|
|
@ -57,7 +57,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
|
|||
AutoTraitFinder { tcx }
|
||||
}
|
||||
|
||||
/// Make a best effort to determine whether and under which conditions an auto trait is
|
||||
/// Makes a best effort to determine whether and under which conditions an auto trait is
|
||||
/// implemented for a type. For example, if you have
|
||||
///
|
||||
/// ```
|
||||
|
|
|
@ -14,8 +14,8 @@ use crate::ty::{self, Ty, TyCtxt};
|
|||
use crate::ty::subst::{Subst, Substs};
|
||||
use crate::ty::fold::TypeFoldable;
|
||||
|
||||
/// Attempts to resolve an obligation to a vtable.. The result is
|
||||
/// a shallow vtable resolution -- meaning that we do not
|
||||
/// Attempts to resolve an obligation to a vtable. The result is
|
||||
/// a shallow vtable resolution, meaning that we do not
|
||||
/// (necessarily) resolve all nested obligations on the impl. Note
|
||||
/// that type check should guarantee to us that all nested
|
||||
/// obligations *could be* resolved if we wanted to.
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
//! See rustc guide chapters on [trait-resolution] and [trait-specialization] for more info on how
|
||||
//! See Rustc Guide chapters on [trait-resolution] and [trait-specialization] for more info on how
|
||||
//! this works.
|
||||
//!
|
||||
//! [trait-resolution]: https://rust-lang.github.io/rustc-guide/traits/resolution.html
|
||||
|
@ -34,7 +34,7 @@ pub struct OverlapResult<'tcx> {
|
|||
pub impl_header: ty::ImplHeader<'tcx>,
|
||||
pub intercrate_ambiguity_causes: Vec<IntercrateAmbiguityCause>,
|
||||
|
||||
/// True if the overlap might've been permitted before the shift
|
||||
/// `true` if the overlap might've been permitted before the shift
|
||||
/// to universes.
|
||||
pub involves_placeholder: bool,
|
||||
}
|
||||
|
@ -111,7 +111,7 @@ fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, '
|
|||
}
|
||||
|
||||
/// Can both impl `a` and impl `b` be satisfied by a common type (including
|
||||
/// `where` clauses)? If so, returns an `ImplHeader` that unifies the two impls.
|
||||
/// where-clauses)? If so, returns an `ImplHeader` that unifies the two impls.
|
||||
fn overlap<'cx, 'gcx, 'tcx>(
|
||||
selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
|
||||
a_def_id: DefId,
|
||||
|
@ -242,7 +242,7 @@ pub enum OrphanCheckErr<'tcx> {
|
|||
}
|
||||
|
||||
/// Checks the coherence orphan rules. `impl_def_id` should be the
|
||||
/// def-id of a trait impl. To pass, either the trait must be local, or else
|
||||
/// `DefId` of a trait impl. To pass, either the trait must be local, or else
|
||||
/// two conditions must be satisfied:
|
||||
///
|
||||
/// 1. All type parameters in `Self` must be "covered" by some local type constructor.
|
||||
|
@ -268,7 +268,7 @@ pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
|||
orphan_check_trait_ref(tcx, trait_ref, InCrate::Local)
|
||||
}
|
||||
|
||||
/// Check whether a trait-ref is potentially implementable by a crate.
|
||||
/// Checks whether a trait-ref is potentially implementable by a crate.
|
||||
///
|
||||
/// The current rule is that a trait-ref orphan checks in a crate C:
|
||||
///
|
||||
|
|
|
@ -583,7 +583,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
|
||||
|
||||
/// Get the parent trait chain start
|
||||
/// Gets the start of the parent trait chain.
|
||||
fn get_parent_trait_ref(&self, code: &ObligationCauseCode<'tcx>) -> Option<String> {
|
||||
match code {
|
||||
&ObligationCauseCode::BuiltinDerivedObligation(ref data) => {
|
||||
|
@ -1376,7 +1376,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns whether the trait predicate may apply for *some* assignment
|
||||
/// Returns `true` if the trait predicate may apply for *some* assignment
|
||||
/// to the type parameters.
|
||||
fn predicate_can_apply(&self,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
|
|
|
@ -140,7 +140,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
|
|||
/// creating a fresh type variable `$0` as well as a projection
|
||||
/// predicate `<SomeType as SomeTrait>::X == $0`. When the
|
||||
/// inference engine runs, it will attempt to find an impl of
|
||||
/// `SomeTrait` or a where clause that lets us unify `$0` with
|
||||
/// `SomeTrait` or a where-clause that lets us unify `$0` with
|
||||
/// something concrete. If this fails, we'll unify `$0` with
|
||||
/// `projection_ty` again.
|
||||
fn normalize_projection_type<'a, 'gcx>(&mut self,
|
||||
|
@ -509,7 +509,7 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx,
|
|||
}
|
||||
}
|
||||
|
||||
/// Return the set of type variables contained in a trait ref
|
||||
/// Returns the set of type variables contained in a trait ref
|
||||
fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
|
||||
t: ty::PolyTraitRef<'tcx>) -> Vec<Ty<'tcx>>
|
||||
{
|
||||
|
|
|
@ -73,14 +73,14 @@ pub use self::FulfillmentErrorCode::*;
|
|||
pub use self::SelectionError::*;
|
||||
pub use self::Vtable::*;
|
||||
|
||||
// Whether to enable bug compatibility with issue #43355
|
||||
/// Whether to enable bug compatibility with issue #43355.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub enum IntercrateMode {
|
||||
Issue43355,
|
||||
Fixed
|
||||
}
|
||||
|
||||
// The mode that trait queries run in
|
||||
/// The mode that trait queries run in.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub enum TraitQueryMode {
|
||||
// Standard/un-canonicalized queries get accurate
|
||||
|
@ -101,37 +101,37 @@ pub enum TraitQueryMode {
|
|||
/// scope. The eventual result is usually a `Selection` (defined below).
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Obligation<'tcx, T> {
|
||||
/// Why do we have to prove this thing?
|
||||
/// The reason we have to prove this thing.
|
||||
pub cause: ObligationCause<'tcx>,
|
||||
|
||||
/// In which environment should we prove this thing?
|
||||
/// The environment in which we should prove this thing.
|
||||
pub param_env: ty::ParamEnv<'tcx>,
|
||||
|
||||
/// What are we trying to prove?
|
||||
/// The thing we are trying to prove.
|
||||
pub predicate: T,
|
||||
|
||||
/// If we started proving this as a result of trying to prove
|
||||
/// something else, track the total depth to ensure termination.
|
||||
/// If this goes over a certain threshold, we abort compilation --
|
||||
/// in such cases, we can not say whether or not the predicate
|
||||
/// holds for certain. Stupid halting problem. Such a drag.
|
||||
/// holds for certain. Stupid halting problem; such a drag.
|
||||
pub recursion_depth: usize,
|
||||
}
|
||||
|
||||
pub type PredicateObligation<'tcx> = Obligation<'tcx, ty::Predicate<'tcx>>;
|
||||
pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>;
|
||||
|
||||
/// Why did we incur this obligation? Used for error reporting.
|
||||
/// The reason why we incurred this obligation; used for error reporting.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct ObligationCause<'tcx> {
|
||||
pub span: Span,
|
||||
|
||||
// The id of the fn body that triggered this obligation. This is
|
||||
// used for region obligations to determine the precise
|
||||
// environment in which the region obligation should be evaluated
|
||||
// (in particular, closures can add new assumptions). See the
|
||||
// field `region_obligations` of the `FulfillmentContext` for more
|
||||
// information.
|
||||
/// The ID of the fn body that triggered this obligation. This is
|
||||
/// used for region obligations to determine the precise
|
||||
/// environment in which the region obligation should be evaluated
|
||||
/// (in particular, closures can add new assumptions). See the
|
||||
/// field `region_obligations` of the `FulfillmentContext` for more
|
||||
/// information.
|
||||
pub body_id: ast::NodeId,
|
||||
|
||||
pub code: ObligationCauseCode<'tcx>
|
||||
|
@ -152,20 +152,20 @@ impl<'tcx> ObligationCause<'tcx> {
|
|||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum ObligationCauseCode<'tcx> {
|
||||
/// Not well classified or should be obvious from span.
|
||||
/// Not well classified or should be obvious from the span.
|
||||
MiscObligation,
|
||||
|
||||
/// A slice or array is WF only if `T: Sized`
|
||||
/// A slice or array is WF only if `T: Sized`.
|
||||
SliceOrArrayElem,
|
||||
|
||||
/// A tuple is WF only if its middle elements are Sized
|
||||
/// A tuple is WF only if its middle elements are `Sized`.
|
||||
TupleElem,
|
||||
|
||||
/// This is the trait reference from the given projection
|
||||
/// This is the trait reference from the given projection.
|
||||
ProjectionWf(ty::ProjectionTy<'tcx>),
|
||||
|
||||
/// In an impl of trait X for type Y, type Y must
|
||||
/// also implement all supertraits of X.
|
||||
/// In an impl of trait `X` for type `Y`, type `Y` must
|
||||
/// also implement all supertraits of `X`.
|
||||
ItemObligation(DefId),
|
||||
|
||||
/// A type like `&'a T` is WF only if `T: 'a`.
|
||||
|
@ -271,7 +271,7 @@ pub struct DerivedObligationCause<'tcx> {
|
|||
/// directly.
|
||||
parent_trait_ref: ty::PolyTraitRef<'tcx>,
|
||||
|
||||
/// The parent trait had this cause
|
||||
/// The parent trait had this cause.
|
||||
parent_code: Rc<ObligationCauseCode<'tcx>>
|
||||
}
|
||||
|
||||
|
@ -280,14 +280,14 @@ pub type PredicateObligations<'tcx> = Vec<PredicateObligation<'tcx>>;
|
|||
pub type TraitObligations<'tcx> = Vec<TraitObligation<'tcx>>;
|
||||
|
||||
/// The following types:
|
||||
/// * `WhereClause`
|
||||
/// * `WellFormed`
|
||||
/// * `FromEnv`
|
||||
/// * `DomainGoal`
|
||||
/// * `Goal`
|
||||
/// * `Clause`
|
||||
/// * `Environment`
|
||||
/// * `InEnvironment`
|
||||
/// * `WhereClause`,
|
||||
/// * `WellFormed`,
|
||||
/// * `FromEnv`,
|
||||
/// * `DomainGoal`,
|
||||
/// * `Goal`,
|
||||
/// * `Clause`,
|
||||
/// * `Environment`,
|
||||
/// * `InEnvironment`,
|
||||
/// are used for representing the trait system in the form of
|
||||
/// logic programming clauses. They are part of the interface
|
||||
/// for the chalk SLG solver.
|
||||
|
@ -485,7 +485,6 @@ pub type SelectionResult<'tcx, T> = Result<Option<T>, SelectionError<'tcx>>;
|
|||
/// For example, the vtable may be tied to a specific impl (case A),
|
||||
/// or it may be relative to some bound that is in scope (case B).
|
||||
///
|
||||
///
|
||||
/// ```
|
||||
/// impl<T:Clone> Clone<T> for Option<T> { ... } // Impl_1
|
||||
/// impl<T:Clone> Clone<T> for Box<T> { ... } // Impl_2
|
||||
|
@ -517,7 +516,7 @@ pub enum Vtable<'tcx, N> {
|
|||
/// Vtable identifying a particular impl.
|
||||
VtableImpl(VtableImplData<'tcx, N>),
|
||||
|
||||
/// Vtable for auto trait implementations
|
||||
/// Vtable for auto trait implementations.
|
||||
/// This carries the information and nested obligations with regards
|
||||
/// to an auto implementation for a trait `Trait`. The nested obligations
|
||||
/// ensure the trait implementation holds for all the constituent types.
|
||||
|
@ -529,18 +528,18 @@ pub enum Vtable<'tcx, N> {
|
|||
/// any).
|
||||
VtableParam(Vec<N>),
|
||||
|
||||
/// Virtual calls through an object
|
||||
/// Virtual calls through an object.
|
||||
VtableObject(VtableObjectData<'tcx, N>),
|
||||
|
||||
/// Successful resolution for a builtin trait.
|
||||
VtableBuiltin(VtableBuiltinData<N>),
|
||||
|
||||
/// Vtable automatically generated for a closure. The def ID is the ID
|
||||
/// Vtable automatically generated for a closure. The `DefId` is the ID
|
||||
/// of the closure expression. This is a `VtableImpl` in spirit, but the
|
||||
/// impl is generated by the compiler and does not appear in the source.
|
||||
VtableClosure(VtableClosureData<'tcx, N>),
|
||||
|
||||
/// Same as above, but for a fn pointer type with the given signature.
|
||||
/// Same as above, but for a function pointer type with the given signature.
|
||||
VtableFnPointer(VtableFnPointerData<'tcx, N>),
|
||||
|
||||
/// Vtable automatically generated for a generator.
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
//! - have a suitable receiver from which we can extract a vtable and coerce to a "thin" version
|
||||
//! that doesn't contain the vtable;
|
||||
//! - not reference the erased type `Self` except for in this receiver;
|
||||
//! - not have generic type parameters
|
||||
//! - not have generic type parameters.
|
||||
|
||||
use super::elaborate_predicates;
|
||||
|
||||
|
@ -22,17 +22,17 @@ use syntax_pos::Span;
|
|||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum ObjectSafetyViolation {
|
||||
/// Self : Sized declared on the trait
|
||||
/// `Self: Sized` declared on the trait.
|
||||
SizedSelf,
|
||||
|
||||
/// Supertrait reference references `Self` an in illegal location
|
||||
/// (e.g., `trait Foo : Bar<Self>`)
|
||||
/// (e.g., `trait Foo : Bar<Self>`).
|
||||
SupertraitSelf,
|
||||
|
||||
/// Method has something illegal
|
||||
/// Method has something illegal.
|
||||
Method(ast::Name, MethodViolationCode),
|
||||
|
||||
/// Associated const
|
||||
/// Associated const.
|
||||
AssociatedConst(ast::Name),
|
||||
}
|
||||
|
||||
|
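Roughly what these violations look like in ordinary code; the hypothetical `NotObjectSafe` trait below can be defined and used generically, but not as a trait object:

```rust
// `NotObjectSafe` hits several of the violations listed above; defining it is
// fine, but `dyn NotObjectSafe` would be rejected by these checks.
trait NotObjectSafe: Sized {         // `Self: Sized` declared on the trait
    const ID: u32;                   // associated const
    fn generic<T>(&self, _x: T);     // method with a generic type parameter
}

trait ObjectSafe {
    fn describe(&self) -> String;
}

impl ObjectSafe for i32 {
    fn describe(&self) -> String {
        format!("i32: {}", self)
    }
}

fn main() {
    let obj: Box<dyn ObjectSafe> = Box::new(7);
    println!("{}", obj.describe());
    // let bad: Box<dyn NotObjectSafe> = Box::new(7);  // object-safety error
}
```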
@ -84,7 +84,7 @@ pub enum MethodViolationCode {
|
|||
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
|
||||
/// Returns the object safety violations that affect
|
||||
/// astconv - currently, Self in supertraits. This is needed
|
||||
/// astconv -- currently, `Self` in supertraits. This is needed
|
||||
/// because `object_safety_violations` can't be used during
|
||||
/// type collection.
|
||||
pub fn astconv_object_safety_violations(self, trait_def_id: DefId)
|
||||
|
@ -399,8 +399,8 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
|||
None
|
||||
}
|
||||
|
||||
/// performs a type substitution to produce the version of receiver_ty when `Self = self_ty`
|
||||
/// e.g., for receiver_ty = `Rc<Self>` and self_ty = `Foo`, returns `Rc<Foo>`
|
||||
/// Performs a type substitution to produce the version of receiver_ty when `Self = self_ty`
|
||||
/// e.g., for receiver_ty = `Rc<Self>` and self_ty = `Foo`, returns `Rc<Foo>`.
|
||||
fn receiver_for_self_ty(
|
||||
self, receiver_ty: Ty<'tcx>, self_ty: Ty<'tcx>, method_def_id: DefId
|
||||
) -> Ty<'tcx> {
|
||||
|
@ -419,9 +419,9 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
|||
result
|
||||
}
|
||||
|
||||
/// creates the object type for the current trait. For example,
|
||||
/// Creates the object type for the current trait. For example,
|
||||
/// if the current trait is `Deref`, then this will be
|
||||
/// `dyn Deref<Target=Self::Target> + 'static`
|
||||
/// `dyn Deref<Target = Self::Target> + 'static`.
|
||||
fn object_ty_for_trait(self, trait_def_id: DefId, lifetime: ty::Region<'tcx>) -> Ty<'tcx> {
|
||||
debug!("object_ty_for_trait: trait_def_id={:?}", trait_def_id);
|
||||
|
||||
|
@ -470,25 +470,27 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
|||
object_ty
|
||||
}
|
||||
|
||||
/// checks the method's receiver (the `self` argument) can be dispatched on when `Self` is a
|
||||
/// Checks the method's receiver (the `self` argument) can be dispatched on when `Self` is a
|
||||
/// trait object. We require that `DispatchFromDyn` be implemented for the receiver type
|
||||
/// in the following way:
|
||||
/// - let `Receiver` be the type of the `self` argument, i.e `Self`, `&Self`, `Rc<Self>`
|
||||
/// - let `Receiver` be the type of the `self` argument, i.e., `Self`, `&Self`, `Rc<Self>`,
|
||||
/// - require the following bound:
|
||||
///
|
||||
/// ```
|
||||
/// Receiver[Self => T]: DispatchFromDyn<Receiver[Self => dyn Trait]>
|
||||
/// ```
|
||||
///
|
||||
/// where `Foo[X => Y]` means "the same type as `Foo`, but with `X` replaced with `Y`"
|
||||
/// (substitution notation).
|
||||
///
|
||||
/// some examples of receiver types and their required obligation
|
||||
/// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>`
|
||||
/// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>`
|
||||
/// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>`
|
||||
/// Some examples of receiver types and their required obligation:
|
||||
/// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>`,
|
||||
/// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>`,
|
||||
/// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>`.
|
||||
///
|
||||
/// The only case where the receiver is not dispatchable, but is still a valid receiver
|
||||
/// type (just not object-safe), is when there is more than one level of pointer indirection.
|
||||
/// e.g., `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. In these cases, there
|
||||
/// E.g., `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. In these cases, there
|
||||
/// is no way, or at least no inexpensive way, to coerce the receiver from the version where
|
||||
/// `Self = dyn Trait` to the version where `Self = T`, where `T` is the unknown erased type
|
||||
/// contained by the trait object, because the object that needs to be coerced is behind
|
||||
|
|
|
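A small sketch of the dispatchability requirement, using only receiver types that ordinary code can write (`&Self`, `Rc<Self>`); the hypothetical `Speak` trait stays usable through `dyn`:

```rust
use std::rc::Rc;

trait Speak {
    fn by_ref(&self) -> String;          // `&'a Self` receiver: dispatchable
    fn by_rc(self: Rc<Self>) -> String;  // `Rc<Self>` receiver: dispatchable
    // A doubly-indirect receiver such as `self: &Rc<Self>` would not be
    // dispatchable from `dyn Speak`, as explained above.
}

impl Speak for String {
    fn by_ref(&self) -> String {
        self.clone()
    }
    fn by_rc(self: Rc<Self>) -> String {
        (*self).clone()
    }
}

fn main() {
    let s: Rc<dyn Speak> = Rc::new(String::from("dispatched"));
    println!("{}", s.by_ref());
    println!("{}", s.by_rc());
}
```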
@ -55,7 +55,7 @@ pub enum Reveal {
|
|||
/// Also, `impl Trait` is normalized to the concrete type,
|
||||
/// which has to be already collected by type-checking.
|
||||
///
|
||||
/// NOTE: As `impl Trait`'s concrete type should *never*
|
||||
/// NOTE: as `impl Trait`'s concrete type should *never*
|
||||
/// be observable directly by the user, `Reveal::All`
|
||||
/// should not be used by checks which may expose
|
||||
/// type equality or type contents to the user.
|
||||
|
@ -844,7 +844,7 @@ impl<'tcx> Progress<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Compute the result of a projection type (if we can).
|
||||
/// Computes the result of a projection type (if we can).
|
||||
///
|
||||
/// IMPORTANT:
|
||||
/// - `obligation` must be fully normalized
|
||||
|
@ -1553,7 +1553,7 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
|
|||
// # Cache
|
||||
|
||||
/// The projection cache. Unlike the standard caches, this can include
|
||||
/// infcx-dependent type variables - therefore, we have to roll the
|
||||
/// infcx-dependent type variables, therefore we have to roll the
|
||||
/// cache back each time we roll a snapshot back, to avoid assumptions
|
||||
/// on yet-unresolved inference variables. Types with placeholder
|
||||
/// regions also have to be removed when the respective snapshot ends.
|
||||
|
@ -1564,9 +1564,9 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
|
|||
/// (for the lifetime of the infcx).
|
||||
///
|
||||
/// Entries in the projection cache might contain inference variables
|
||||
/// that will be resolved by obligations on the projection cache entry - e.g.
|
||||
/// that will be resolved by obligations on the projection cache entry (e.g.,
|
||||
/// when a type parameter in the associated type is constrained through
|
||||
/// an "RFC 447" projection on the impl.
|
||||
/// an "RFC 447" projection on the impl).
|
||||
///
|
||||
/// When working with a fulfillment context, the derived obligations of each
|
||||
/// projection cache entry will be registered on the fulfillcx, so any users
|
||||
|
@ -1578,10 +1578,9 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
|
|||
/// If that is done, after evaluating the obligations, it is a good idea to
|
||||
/// call `ProjectionCache::complete` to make sure the obligations won't be
|
||||
/// re-evaluated and avoid an exponential worst-case.
|
||||
///
|
||||
/// FIXME: we probably also want some sort of cross-infcx cache here to
|
||||
/// reduce the amount of duplication. Let's see what we get with the Chalk
|
||||
/// reforms.
|
||||
//
|
||||
// FIXME: we probably also want some sort of cross-infcx cache here to
|
||||
// reduce the amount of duplication. Let's see what we get with the Chalk reforms.
|
||||
#[derive(Default)]
|
||||
pub struct ProjectionCache<'tcx> {
|
||||
map: SnapshotMap<ProjectionCacheKey<'tcx>, ProjectionCacheEntry<'tcx>>,
|
||||
|
|
|
@ -184,7 +184,7 @@ impl_stable_hash_for!(struct DtorckConstraint<'tcx> {
|
|||
/// outlive. This is similar but not *quite* the same as the
|
||||
/// `needs_drop` test in the compiler already -- that is, for every
|
||||
/// type T for which this function returns true, needs-drop would
|
||||
/// return false. But the reverse does not hold: in particular,
|
||||
/// return `false`. But the reverse does not hold: in particular,
|
||||
/// `needs_drop` returns false for `PhantomData`, but it is not
|
||||
/// trivial for dropck-outlives.
|
||||
///
|
||||
|
|
|
@ -24,7 +24,7 @@ impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> {
|
|||
/// the normalized value along with various outlives relations (in
|
||||
/// the form of obligations that must be discharged).
|
||||
///
|
||||
/// NB. This will *eventually* be the main means of
|
||||
/// N.B., this will *eventually* be the main means of
|
||||
/// normalizing, but for now should be used only when we actually
|
||||
/// know that normalization will succeed, since error reporting
|
||||
/// and other details are still "under development".
|
||||
|
|
|
@ -45,7 +45,7 @@ impl<'cx, 'tcx> TyCtxt<'cx, 'tcx, 'tcx> {
|
|||
/// a `T` (with regions erased). This is appropriate when the
|
||||
/// binder is being instantiated at the call site.
|
||||
///
|
||||
/// NB. Currently, higher-ranked type bounds inhibit
|
||||
/// N.B., currently, higher-ranked type bounds inhibit
|
||||
/// normalization. Therefore, each time we erase them in
|
||||
/// codegen, we need to normalize the contents.
|
||||
pub fn normalize_erasing_late_bound_regions<T>(
|
||||
|
|
|
@ -52,7 +52,7 @@ pub trait Normalizable<'gcx, 'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx>
|
|||
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
|
||||
) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>>;
|
||||
|
||||
/// Convert from the `'gcx` (lifted) form of `Self` into the `tcx`
|
||||
/// Converts from the `'gcx` (lifted) form of `Self` into the `tcx`
|
||||
/// form of `Self`.
|
||||
fn shrink_to_tcx_lifetime(
|
||||
v: &'a CanonicalizedQueryResponse<'gcx, Self>,
|
||||
|
|
|
@ -331,7 +331,7 @@ enum BuiltinImplConditions<'tcx> {
|
|||
/// - `EvaluatedToErr` implies `EvaluatedToRecur`
|
||||
/// - the "union" of evaluation results is equal to their maximum -
|
||||
/// all the "potential success" candidates can potentially succeed,
|
||||
/// so they are no-ops when unioned with a definite error, and within
|
||||
/// so they are noops when unioned with a definite error, and within
|
||||
/// the categories it's easy to see that the unions are correct.
|
||||
pub enum EvaluationResult {
|
||||
/// Evaluation successful
|
||||
|
@ -383,31 +383,30 @@ pub enum EvaluationResult {
|
|||
/// ```
|
||||
///
|
||||
/// When we try to prove it, we first go the first option, which
|
||||
/// recurses. This shows us that the impl is "useless" - it won't
|
||||
/// recurses. This shows us that the impl is "useless" -- it won't
|
||||
/// tell us that `T: Trait` unless it already implemented `Trait`
|
||||
/// by some other means. However, that does not prove that `T: Trait`
|
||||
/// does not hold, because of the bound (which can indeed be satisfied
|
||||
/// by `SomeUnsizedType` from another crate).
|
||||
///
|
||||
/// FIXME: when an `EvaluatedToRecur` goes past its parent root, we
|
||||
/// ought to convert it to an `EvaluatedToErr`, because we know
|
||||
/// there definitely isn't a proof tree for that obligation. Not
|
||||
/// doing so is still sound - there isn't any proof tree, so the
|
||||
/// branch still can't be a part of a minimal one - but does not
|
||||
/// re-enable caching.
|
||||
//
|
||||
// FIXME: when an `EvaluatedToRecur` goes past its parent root, we
|
||||
// ought to convert it to an `EvaluatedToErr`, because we know
|
||||
// there definitely isn't a proof tree for that obligation. Not
|
||||
// doing so is still sound -- there isn't any proof tree, so the
|
||||
// branch still can't be a part of a minimal one -- but does not re-enable caching.
|
||||
EvaluatedToRecur,
|
||||
/// Evaluation failed
|
||||
/// Evaluation failed.
|
||||
EvaluatedToErr,
|
||||
}
|
||||
|
||||
impl EvaluationResult {
|
||||
/// True if this evaluation result is known to apply, even
|
||||
/// Returns `true` if this evaluation result is known to apply, even
|
||||
/// considering outlives constraints.
|
||||
pub fn must_apply_considering_regions(self) -> bool {
|
||||
self == EvaluatedToOk
|
||||
}
|
||||
|
||||
/// True if this evaluation result is known to apply, ignoring
|
||||
/// Returns `true` if this evaluation result is known to apply, ignoring
|
||||
/// outlives constraints.
|
||||
pub fn must_apply_modulo_regions(self) -> bool {
|
||||
self <= EvaluatedToOkModuloRegions
|
||||
|
@ -981,8 +980,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
|||
/// that recursion is ok. This routine returns true if the top of the
|
||||
/// stack (`cycle[0]`):
|
||||
///
|
||||
/// - is a defaulted trait, and
|
||||
/// - it also appears in the backtrace at some position `X`; and,
|
||||
/// - is a defaulted trait,
|
||||
/// - it also appears in the backtrace at some position `X`,
|
||||
/// - all the predicates at positions `X..` between `X` and the top are
|
||||
/// also defaulted traits.
|
||||
pub fn coinductive_match<I>(&mut self, cycle: I) -> bool
|
||||
|
@ -1003,7 +1002,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
|||
}
|
||||
|
||||
/// Further evaluate `candidate` to decide whether all type parameters match and whether nested
|
||||
/// obligations are met. Returns true if `candidate` remains viable after this further
|
||||
/// obligations are met. Returns whether `candidate` remains viable after this further
|
||||
/// scrutiny.
|
||||
fn evaluate_candidate<'o>(
|
||||
&mut self,
|
||||
|
@ -1434,7 +1433,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns true if the global caches can be used.
|
||||
/// Returns `true` if the global caches can be used.
|
||||
/// Do note that if the type itself is not in the
|
||||
/// global tcx, the local caches will be used.
|
||||
fn can_use_global_caches(&self, param_env: ty::ParamEnv<'tcx>) -> bool {
|
||||
|
@ -1850,7 +1849,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Check for the artificial impl that the compiler will create for an obligation like `X :
|
||||
/// Checks for the artificial impl that the compiler will create for an obligation like `X :
|
||||
/// FnMut<..>` where `X` is a closure type.
|
||||
///
|
||||
/// Note: the type parameters on a closure candidate are modeled as *output* type
|
||||
|
@ -2231,7 +2230,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
|||
// type variables and then we also attempt to evaluate recursive
|
||||
// bounds to see if they are satisfied.
|
||||
|
||||
/// Returns true if `victim` should be dropped in favor of
|
||||
/// Returns `true` if `victim` should be dropped in favor of
|
||||
/// `other`. Generally speaking we will drop duplicate
|
||||
/// candidates and prefer where-clause candidates.
|
||||
///
|
||||
|
@ -3235,7 +3234,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
|||
/// impl Fn(int) for Closure { ... }
|
||||
///
|
||||
/// Now imagine our obligation is `Fn(usize) for Closure`. So far
|
||||
/// we have matched the self-type `Closure`. At this point we'll
|
||||
/// we have matched the self type `Closure`. At this point we'll
|
||||
/// compare the `int` to `usize` and generate an error.
|
||||
///
|
||||
/// Note that this checking occurs *after* the impl has selected,
|
||||
|
|
|
@ -58,7 +58,7 @@ pub struct OverlapError {
|
|||
/// Suppose we have selected "source impl" with `V` instantiated with `u32`.
|
||||
/// This function will produce a substitution with `T` and `U` both mapping to `u32`.
|
||||
///
|
||||
/// Where clauses add some trickiness here, because they can be used to "define"
|
||||
/// where-clauses add some trickiness here, because they can be used to "define"
|
||||
/// an argument indirectly:
|
||||
///
|
||||
/// ```rust
|
||||
|
@ -145,10 +145,10 @@ pub fn find_associated_item<'a, 'tcx>(
|
|||
}
|
||||
}
|
||||
|
||||
/// Is impl1 a specialization of impl2?
|
||||
/// Is `impl1` a specialization of `impl2`?
|
||||
///
|
||||
/// Specialization is determined by the sets of types to which the impls apply;
|
||||
/// impl1 specializes impl2 if it applies to a subset of the types impl2 applies
|
||||
/// `impl1` specializes `impl2` if it applies to a subset of the types `impl2` applies
|
||||
/// to.
|
||||
pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
(impl1_def_id, impl2_def_id): (DefId, DefId))
|
||||
|
|
|
@ -97,7 +97,7 @@ impl<'a, 'gcx, 'tcx> Children {
|
|||
}
|
||||
}
|
||||
|
||||
/// Remove an impl from this set of children. Used when replacing
|
||||
/// Removes an impl from this set of children. Used when replacing
|
||||
/// an impl with a parent. The impl must be present in the list of
|
||||
/// children already.
|
||||
fn remove_existing(&mut self,
|
||||
|
@ -399,7 +399,7 @@ impl<'a, 'gcx, 'tcx> Graph {
|
|||
self.children.entry(parent).or_default().insert_blindly(tcx, child);
|
||||
}
|
||||
|
||||
/// The parent of a given impl, which is the def id of the trait when the
|
||||
/// The parent of a given impl, which is the `DefId` of the trait when the
|
||||
/// impl is a "specialization root".
|
||||
pub fn parent(&self, child: DefId) -> DefId {
|
||||
*self.parent.get(&child).unwrap()
|
||||
|
|
|
@ -110,12 +110,12 @@ impl<'a, 'gcx, 'tcx> OverloadedDeref<'tcx> {
|
|||
}
|
||||
|
||||
/// At least for initial deployment, we want to limit two-phase borrows to
|
||||
/// only a few specific cases. Right now, those mostly "things that desugar"
|
||||
/// into method calls
|
||||
/// - using x.some_method() syntax, where some_method takes &mut self
|
||||
/// - using Foo::some_method(&mut x, ...) syntax
|
||||
/// - binary assignment operators (+=, -=, *=, etc.)
|
||||
/// Anything else should be rejected until generalized two phase borrow support
|
||||
/// only a few specific cases. Right now, those are mostly "things that desugar"
|
||||
/// into method calls:
|
||||
/// - using `x.some_method()` syntax, where some_method takes `&mut self`,
|
||||
/// - using `Foo::some_method(&mut x, ...)` syntax,
|
||||
/// - binary assignment operators (`+=`, `-=`, `*=`, etc.).
|
||||
/// Anything else should be rejected until generalized two-phase borrow support
|
||||
/// is implemented. Right now, dataflow can't handle the general case where there
|
||||
/// is more than one use of a mutable borrow, and we don't want to accept too much
|
||||
/// new code via two-phase borrows, so we try to limit where we create two-phase
|
||||
|
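As a concrete reference point for the cases above, a minimal example where the implicit `&mut` borrow is reserved first and only activated after the argument has been evaluated:

```rust
fn main() {
    let mut v = vec![1, 2, 3];
    // Method-call syntax with a `&mut self` receiver: two-phase borrow of `v`,
    // so the shared read in `v.len()` is still allowed.
    v.push(v.len());
    // A compound assignment operator is another accepted desugaring.
    let mut total = 0;
    total += v.len();
    println!("{:?} {}", v, total);
}
```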
@ -144,10 +144,10 @@ impl From<AutoBorrowMutability> for hir::Mutability {
|
|||
|
||||
#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum AutoBorrow<'tcx> {
|
||||
/// Convert from T to &T.
|
||||
/// Converts from T to &T.
|
||||
Ref(ty::Region<'tcx>, AutoBorrowMutability),
|
||||
|
||||
/// Convert from T to *T.
|
||||
/// Converts from T to *T.
|
||||
RawPtr(hir::Mutability),
|
||||
}
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns true if this function must conform to `min_const_fn`
|
||||
/// Returns `true` if this function must conform to `min_const_fn`
|
||||
pub fn is_min_const_fn(self, def_id: DefId) -> bool {
|
||||
// Bail out if the signature doesn't contain `const`
|
||||
if !self.is_const_fn_raw(def_id) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
//! type context book-keeping
|
||||
//! Type context book-keeping.
|
||||
|
||||
use crate::dep_graph::DepGraph;
|
||||
use crate::dep_graph::{self, DepNode, DepConstructor};
|
||||
|
@ -413,7 +413,7 @@ pub struct TypeckTables<'tcx> {
|
|||
pub tainted_by_errors: bool,
|
||||
|
||||
/// Stores the free-region relationships that were deduced from
|
||||
/// its where clauses and parameter types. These are then
|
||||
/// its where-clauses and parameter types. These are then
|
||||
/// read-again by borrowck.
|
||||
pub free_region_map: FreeRegionMap<'tcx>,
|
||||
|
||||
|
@ -837,7 +837,7 @@ pub type CanonicalUserType<'gcx> = Canonical<'gcx, UserType<'gcx>>;
|
|||
|
||||
impl CanonicalUserType<'gcx> {
|
||||
/// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
|
||||
/// i.e. each thing is mapped to a canonical variable with the same index.
|
||||
/// i.e., each thing is mapped to a canonical variable with the same index.
|
||||
pub fn is_identity(&self) -> bool {
|
||||
match self.value {
|
||||
UserType::Ty(_) => false,
|
||||
|
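The identity condition can be pictured as a toy check over indices (purely illustrative, not the compiler's representation): a substitution is the identity exactly when entry `i` maps to canonical variable `i`.

```rust
fn is_identity(subst: &[usize]) -> bool {
    subst.iter().enumerate().all(|(i, &var)| var == i)
}

fn main() {
    assert!(is_identity(&[0, 1, 2]));   // [?0, ?1, ?2]
    assert!(!is_identity(&[0, 2, 1]));  // a genuine (non-identity) substitution
}
```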
@ -1053,7 +1053,7 @@ pub struct GlobalCtxt<'tcx> {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
/// Get the global TyCtxt.
|
||||
/// Gets the global `TyCtxt`.
|
||||
#[inline]
|
||||
pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> {
|
||||
TyCtxt {
|
||||
|
@ -1153,12 +1153,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
value.lift_to_tcx(self.global_tcx())
|
||||
}
|
||||
|
||||
/// Returns true if self is the same as self.global_tcx().
|
||||
/// Returns `true` if `self` is the same as `self.global_tcx()`.
|
||||
fn is_global(self) -> bool {
|
||||
ptr::eq(self.interners, &self.global_interners)
|
||||
}
|
||||
|
||||
/// Create a type context and call the closure with a `TyCtxt` reference
|
||||
/// Creates a type context and calls the closure with a `TyCtxt` reference
|
||||
/// to the context. The closure enforces that the type context and any interned
|
||||
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
|
||||
/// reference to the context, to allow formatting values that need it.
|
||||
|
@ -1353,7 +1353,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Convert a `DefId` into its fully expanded `DefPath` (every
|
||||
/// Converts a `DefId` into its fully expanded `DefPath` (every
|
||||
/// `DefId` is really just an interned def-path).
|
||||
///
|
||||
/// Note that if `id` is not local to this crate, the result will
|
||||
|
|
|
@ -25,9 +25,11 @@
|
|||
//! proper thing.
|
||||
//!
|
||||
//! A `TypeFoldable` T can also be visited by a `TypeVisitor` V using a similar setup:
|
||||
//!
|
||||
//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V).
|
||||
//! These methods return true to indicate that the visitor has found what it is looking for
|
||||
//! and does not need to visit anything else.
|
||||
//!
|
||||
//! These methods return true to indicate that the visitor has found what it is
|
||||
//! looking for, and does not need to visit anything else.
|
||||
|
||||
use crate::hir::def_id::DefId;
|
||||
use crate::ty::{self, Binder, Ty, TyCtxt, TypeFlags};
|
||||
|
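A self-contained analogue of this fold/visit hand-off, with invented `Expr`/`ExprVisitor` types standing in for the compiler's: `visit_with` calls the visitor, whose default is to recurse via `super_visit_with`, and returning `true` stops the walk.

```rust
enum Expr {
    Num(i64),
    Add(Box<Expr>, Box<Expr>),
}

trait ExprVisitor: Sized {
    // Default behavior: keep walking into the children.
    fn visit_expr(&mut self, e: &Expr) -> bool {
        e.super_visit_with(self)
    }
}

impl Expr {
    fn visit_with<V: ExprVisitor>(&self, v: &mut V) -> bool {
        v.visit_expr(self)
    }
    fn super_visit_with<V: ExprVisitor>(&self, v: &mut V) -> bool {
        match self {
            Expr::Num(_) => false,
            Expr::Add(a, b) => a.visit_with(v) || b.visit_with(v),
        }
    }
}

// A visitor that stops (returns `true`) as soon as it finds a negative literal.
struct HasNegative;

impl ExprVisitor for HasNegative {
    fn visit_expr(&mut self, e: &Expr) -> bool {
        match e {
            Expr::Num(n) => *n < 0,
            _ => e.super_visit_with(self),
        }
    }
}

fn main() {
    let e = Expr::Add(Box::new(Expr::Num(1)), Box::new(Expr::Num(-2)));
    assert!(e.visit_with(&mut HasNegative));
}
```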
@ -52,7 +54,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
|
|||
self.super_visit_with(visitor)
|
||||
}
|
||||
|
||||
/// True if `self` has any late-bound regions that are either
|
||||
/// Returns `true` if `self` has any late-bound regions that are either
|
||||
/// bound by `binder` or bound by some binder outside of `binder`.
|
||||
/// If `binder` is `ty::INNERMOST`, this indicates whether
|
||||
/// there are any late-bound regions that appear free.
|
||||
|
@ -60,7 +62,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
|
|||
self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder })
|
||||
}
|
||||
|
||||
/// True if this `self` has any regions that escape `binder` (and
|
||||
/// Returns `true` if this `self` has any regions that escape `binder` (and
|
||||
/// hence are not bound by it).
|
||||
fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool {
|
||||
self.has_vars_bound_at_or_above(binder.shifted_in(1))
|
||||
|
@ -141,7 +143,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
|
|||
}
|
||||
}
|
||||
|
||||
/// The TypeFolder trait defines the actual *folding*. There is a
|
||||
/// The `TypeFolder` trait defines the actual *folding*. There is a
|
||||
/// method defined for every foldable type. Each of these has a
|
||||
/// default implementation that does an "identity" fold. Within each
|
||||
/// identity fold, it should invoke `foo.fold_with(self)` to fold each
|
||||
|
@ -262,7 +264,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
});
|
||||
}
|
||||
|
||||
/// True if `callback` returns true for every region appearing free in `value`.
|
||||
/// Returns `true` if `callback` returns true for every region appearing free in `value`.
|
||||
pub fn all_free_regions_meet(
|
||||
self,
|
||||
value: &impl TypeFoldable<'tcx>,
|
||||
|
@ -271,7 +273,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
!self.any_free_region_meets(value, |r| !callback(r))
|
||||
}
|
||||
|
||||
/// True if `callback` returns true for some region appearing free in `value`.
|
||||
/// Returns `true` if `callback` returns true for some region appearing free in `value`.
|
||||
pub fn any_free_region_meets(
|
||||
self,
|
||||
value: &impl TypeFoldable<'tcx>,
|
||||
|
@ -292,8 +294,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
/// ^ ^ ^ ^
|
||||
/// | | | | here, would be shifted in 1
|
||||
/// | | | here, would be shifted in 2
|
||||
/// | | here, would be INNERMOST shifted in by 1
|
||||
/// | here, initially, binder would be INNERMOST
|
||||
/// | | here, would be `INNERMOST` shifted in by 1
|
||||
/// | here, initially, binder would be `INNERMOST`
|
||||
/// ```
|
||||
///
|
||||
/// You see that, initially, *any* bound value is free,
|
||||
|
@ -496,7 +498,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx>
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
/// Replace all regions bound by the given `Binder` with the
|
||||
/// Replaces all regions bound by the given `Binder` with the
|
||||
/// results returned by the closure; the closure is expected to
|
||||
/// return a free region (relative to this binder), and hence the
|
||||
/// binder is removed in the return type. The closure is invoked
|
||||
|
@ -520,7 +522,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t)
|
||||
}
|
||||
|
||||
/// Replace all escaping bound vars. The `fld_r` closure replaces escaping
|
||||
/// Replaces all escaping bound vars. The `fld_r` closure replaces escaping
|
||||
/// bound regions while the `fld_t` closure replaces escaping bound types.
|
||||
pub fn replace_escaping_bound_vars<T, F, G>(
|
||||
self,
|
||||
|
@ -554,7 +556,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Replace all types or regions bound by the given `Binder`. The `fld_r`
|
||||
/// Replaces all types or regions bound by the given `Binder`. The `fld_r`
|
||||
/// closure replaces bound regions while the `fld_t` closure replaces bound
|
||||
/// types.
|
||||
pub fn replace_bound_vars<T, F, G>(
|
||||
|
@ -570,7 +572,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t)
|
||||
}
|
||||
|
||||
/// Replace any late-bound regions bound in `value` with
|
||||
/// Replaces any late-bound regions bound in `value` with
|
||||
/// free variants attached to `all_outlive_scope`.
|
||||
pub fn liberate_late_bound_regions<T>(
|
||||
&self,
|
||||
|
@ -640,7 +642,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
collector.regions
|
||||
}
|
||||
|
||||
/// Replace any late-bound regions bound in `value` with `'erased`. Useful in codegen but also
|
||||
/// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also
|
||||
/// method lookup and a few other places where precise region relationships are not required.
|
||||
pub fn erase_late_bound_regions<T>(self, value: &Binder<T>) -> T
|
||||
where T : TypeFoldable<'tcx>
|
||||
|
@ -818,7 +820,7 @@ pub fn shift_out_vars<'a, 'gcx, 'tcx, T>(
|
|||
/// scope to which it is attached, etc. An escaping var represents
|
||||
/// a bound var for which this processing has not yet been done.
|
||||
struct HasEscapingVarsVisitor {
|
||||
/// Anything bound by `outer_index` or "above" is escaping
|
||||
/// Anything bound by `outer_index` or "above" is escaping.
|
||||
outer_index: ty::DebruijnIndex,
|
||||
}
|
||||
|
||||
|
@ -881,7 +883,7 @@ struct LateBoundRegionsCollector {
|
|||
current_index: ty::DebruijnIndex,
|
||||
regions: FxHashSet<ty::BoundRegion>,
|
||||
|
||||
/// If true, we only want regions that are known to be
|
||||
/// `true` if we only want regions that are known to be
|
||||
/// "constrained" when you equate this type with another type. In
|
||||
/// particular, if you have e.g., `&'a u32` and `&'b u32`, equating
|
||||
/// them constrains `'a == 'b`. But if you have `<&'a u32 as
|
||||
|
|
|
@ -22,14 +22,14 @@ pub struct DefIdForest {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> DefIdForest {
|
||||
/// Create an empty forest.
|
||||
/// Creates an empty forest.
|
||||
pub fn empty() -> DefIdForest {
|
||||
DefIdForest {
|
||||
root_ids: SmallVec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a forest consisting of a single tree representing the entire
|
||||
/// Creates a forest consisting of a single tree representing the entire
|
||||
/// crate.
|
||||
#[inline]
|
||||
pub fn full(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest {
|
||||
|
@ -37,7 +37,7 @@ impl<'a, 'gcx, 'tcx> DefIdForest {
|
|||
DefIdForest::from_id(crate_id)
|
||||
}
|
||||
|
||||
/// Create a forest containing a DefId and all its descendants.
|
||||
/// Creates a forest containing a DefId and all its descendants.
|
||||
pub fn from_id(id: DefId) -> DefIdForest {
|
||||
let mut root_ids = SmallVec::new();
|
||||
root_ids.push(id);
|
||||
|
@ -46,12 +46,12 @@ impl<'a, 'gcx, 'tcx> DefIdForest {
|
|||
}
|
||||
}
|
||||
|
||||
/// Test whether the forest is empty.
|
||||
/// Tests whether the forest is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.root_ids.is_empty()
|
||||
}
|
||||
|
||||
/// Test whether the forest contains a given DefId.
|
||||
/// Tests whether the forest contains a given DefId.
|
||||
pub fn contains(&self,
|
||||
tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
id: DefId) -> bool
|
||||
|
|
|
@ -22,17 +22,17 @@ pub enum InstanceDef<'tcx> {
|
|||
/// `<T as Trait>::method` where `method` receives unsizeable `self: Self`.
|
||||
VtableShim(DefId),
|
||||
|
||||
/// \<fn() as FnTrait>::call_*
|
||||
/// def-id is FnTrait::call_*
|
||||
/// `<fn() as FnTrait>::call_*`
|
||||
/// `DefId` is `FnTrait::call_*`
|
||||
FnPtrShim(DefId, Ty<'tcx>),
|
||||
|
||||
/// <Trait as Trait>::fn
|
||||
/// `<Trait as Trait>::fn`
|
||||
Virtual(DefId, usize),
|
||||
|
||||
/// <[mut closure] as FnOnce>::call_once
|
||||
/// `<[mut closure] as FnOnce>::call_once`
|
||||
ClosureOnceShim { call_once: DefId },
|
||||
|
||||
/// drop_in_place::<T>; None for empty drop glue.
|
||||
/// `drop_in_place::<T>`; `None` for empty drop glue.
|
||||
DropGlue(DefId, Option<Ty<'tcx>>),
|
||||
|
||||
/// `<T as Clone>::clone` shim.
|
||||
|
@ -220,7 +220,7 @@ impl<'a, 'b, 'tcx> Instance<'tcx> {
|
|||
self.def.def_id()
|
||||
}
|
||||
|
||||
/// Resolve a (def_id, substs) pair to an (optional) instance -- most commonly,
|
||||
/// Resolves a `(def_id, substs)` pair to an (optional) instance -- most commonly,
|
||||
/// this is used to find the precise code that will run for a trait method invocation,
|
||||
/// if known.
|
||||
///
|
||||
|
|
|
@ -43,7 +43,7 @@ pub fn with_forced_impl_filename_line<F: FnOnce() -> R, R>(f: F) -> R {
|
|||
})
|
||||
}
|
||||
|
||||
/// Add the `crate::` prefix to paths where appropriate.
|
||||
/// Adds the `crate::` prefix to paths where appropriate.
|
||||
pub fn with_crate_prefix<F: FnOnce() -> R, R>(f: F) -> R {
|
||||
SHOULD_PREFIX_WITH_CRATE.with(|flag| {
|
||||
let old = flag.get();
|
||||
|
@ -55,7 +55,7 @@ pub fn with_crate_prefix<F: FnOnce() -> R, R>(f: F) -> R {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
/// Returns a string identifying this def-id. This string is
|
||||
/// Returns a string identifying this `DefId`. This string is
|
||||
/// suitable for user output. It is relative to the current crate
|
||||
/// root, unless with_forced_absolute_paths was used.
|
||||
pub fn item_path_str(self, def_id: DefId) -> String {
|
||||
|
@ -468,7 +468,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
buffer.push(&format!("<impl at {}>", span_str));
|
||||
}
|
||||
|
||||
/// Returns the def-id of `def_id`'s parent in the def tree. If
|
||||
/// Returns the `DefId` of `def_id`'s parent in the def tree. If
|
||||
/// this returns `None`, then `def_id` represents a crate root or
|
||||
/// inlined root.
|
||||
pub fn parent_def_id(self, def_id: DefId) -> Option<DefId> {
|
||||
|
@ -478,9 +478,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
|
||||
/// As a heuristic, when we see an impl, if we see that the
|
||||
/// 'self-type' is a type defined in the same module as the impl,
|
||||
/// 'self type' is a type defined in the same module as the impl,
|
||||
/// we can omit including the path to the impl itself. This
|
||||
/// function tries to find a "characteristic def-id" for a
|
||||
/// function tries to find a "characteristic `DefId`" for a
|
||||
/// type. It's just a heuristic so it makes some questionable
|
||||
/// decisions and we may want to adjust it later.
|
||||
pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option<DefId> {
|
||||
|
|
|
@ -46,7 +46,7 @@ impl IntegerExt for Integer {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the Integer type from an attr::IntType.
|
||||
/// Gets the Integer type from an attr::IntType.
|
||||
fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer {
|
||||
let dl = cx.data_layout();
|
||||
|
||||
|
@ -62,7 +62,7 @@ impl IntegerExt for Integer {
|
|||
}
|
||||
}
|
||||
|
||||
/// Find the appropriate Integer type and signedness for the given
|
||||
/// Finds the appropriate Integer type and signedness for the given
|
||||
/// signed discriminant range and #[repr] attribute.
|
||||
/// N.B.: u128 values above i128::MAX will be treated as signed, but
|
||||
/// that shouldn't affect anything, other than maybe debuginfo.
|
||||
|
@ -1686,7 +1686,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
|
|||
tcx.types.re_static,
|
||||
tcx.mk_array(tcx.types.usize, 3),
|
||||
)
|
||||
/* FIXME use actual fn pointers
|
||||
/* FIXME: use actual fn pointers
|
||||
Warning: naively computing the number of entries in the
|
||||
vtable by counting the methods on the trait + methods on
|
||||
all parent traits does not work, because some methods can
|
||||
|
|
|
@ -135,8 +135,8 @@ pub enum AssociatedItemContainer {
|
|||
}
|
||||
|
||||
impl AssociatedItemContainer {
|
||||
/// Asserts that this is the def-id of an associated item declared
|
||||
/// in a trait, and returns the trait def-id.
|
||||
/// Asserts that this is the `DefId` of an associated item declared
|
||||
/// in a trait, and returns the trait `DefId`.
|
||||
pub fn assert_trait(&self) -> DefId {
|
||||
match *self {
|
||||
TraitContainer(id) => id,
|
||||
|
@ -154,7 +154,7 @@ impl AssociatedItemContainer {
|
|||
|
||||
/// The "header" of an impl is everything outside the body: a Self type, a trait
|
||||
/// ref (in the case of a trait impl), and a set of predicates (from the
|
||||
/// bounds/where clauses).
|
||||
/// bounds / where-clauses).
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct ImplHeader<'tcx> {
|
||||
pub impl_def_id: DefId,
|
||||
|
@ -489,12 +489,12 @@ pub struct TyS<'tcx> {
|
|||
/// So, for a type without any late-bound things, like `u32`, this
|
||||
/// will be *innermost*, because that is the innermost binder that
|
||||
/// captures nothing. But for a type `&'D u32`, where `'D` is a
|
||||
/// late-bound region with debruijn index `D`, this would be `D + 1`
|
||||
/// late-bound region with De Bruijn index `D`, this would be `D + 1`
|
||||
/// -- the binder itself does not capture `D`, but `D` is captured
|
||||
/// by an inner binder.
|
||||
///
|
||||
/// We call this concept an "exclusive" binder `D` because all
|
||||
/// debruijn indices within the type are contained within `0..D`
|
||||
/// De Bruijn indices within the type are contained within `0..D`
|
||||
/// (exclusive).
|
||||
outer_exclusive_binder: ty::DebruijnIndex,
|
||||
}
|
||||
|
@ -720,9 +720,9 @@ pub struct UpvarPath {
|
|||
pub hir_id: hir::HirId,
|
||||
}
|
||||
|
||||
/// Upvars do not get their own node-id. Instead, we use the pair of
|
||||
/// the original var id (that is, the root variable that is referenced
|
||||
/// by the upvar) and the id of the closure expression.
|
||||
/// Upvars do not get their own `NodeId`. Instead, we use the pair of
|
||||
/// the original var ID (that is, the root variable that is referenced
|
||||
/// by the upvar) and the ID of the closure expression.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct UpvarId {
|
||||
pub var_path: UpvarPath,
|
||||
|
@ -1457,7 +1457,7 @@ impl<'tcx> Predicate<'tcx> {
|
|||
|
||||
/// Represents the bounds declared on a particular set of type
|
||||
/// parameters. Should eventually be generalized into a flag list of
|
||||
/// where clauses. You can obtain a `InstantiatedPredicates` list from a
|
||||
/// where-clauses. You can obtain a `InstantiatedPredicates` list from a
|
||||
/// `GenericPredicates` by using the `instantiate` method. Note that this method
|
||||
/// reflects an important semantic invariant of `InstantiatedPredicates`: while
|
||||
/// the `GenericPredicates` are expressed in terms of the bound type
|
||||
|
@ -1619,7 +1619,7 @@ pub struct ParamEnv<'tcx> {
|
|||
|
||||
impl<'tcx> ParamEnv<'tcx> {
|
||||
/// Construct a trait environment suitable for contexts where
|
||||
/// there are no where clauses in scope. Hidden types (like `impl
|
||||
/// there are no where-clauses in scope. Hidden types (like `impl
|
||||
/// Trait`) are left hidden, so this is suitable for ordinary
|
||||
/// type-checking.
|
||||
#[inline]
|
||||
|
@ -1627,12 +1627,12 @@ impl<'tcx> ParamEnv<'tcx> {
|
|||
Self::new(List::empty(), Reveal::UserFacing, None)
|
||||
}
|
||||
|
||||
/// Construct a trait environment with no where clauses in scope
|
||||
/// Construct a trait environment with no where-clauses in scope
|
||||
/// where the values of all `impl Trait` and other hidden types
|
||||
/// are revealed. This is suitable for monomorphized, post-typeck
|
||||
/// environments like codegen or doing optimizations.
|
||||
///
|
||||
/// N.B. If you want to have predicates in scope, use `ParamEnv::new`,
|
||||
/// N.B., if you want to have predicates in scope, use `ParamEnv::new`,
|
||||
/// or invoke `param_env.with_reveal_all()`.
|
||||
#[inline]
|
||||
pub fn reveal_all() -> Self {
|
||||
|
@ -1736,7 +1736,7 @@ impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>> for ParamEnvAnd<'gcx, T>
|
|||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct Destructor {
|
||||
/// The def-id of the destructor method
|
||||
/// The `DefId` of the destructor method
|
||||
pub did: DefId,
|
||||
}
|
||||
|
||||
|
@ -1781,20 +1781,21 @@ pub struct VariantDef {
|
|||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> VariantDef {
|
||||
/// Create a new `VariantDef`.
|
||||
/// Creates a new `VariantDef`.
|
||||
///
|
||||
/// - `did` is the DefId used for the variant - for tuple-structs, it is the constructor DefId,
|
||||
/// and for everything else, it is the variant DefId.
|
||||
/// - `did` is the `DefId` used for the variant.
|
||||
/// This is the constructor `DefId` for tuple structs, and the variant `DefId` for everything
|
||||
/// else.
|
||||
/// - `attribute_def_id` is the DefId that has the variant's attributes.
|
||||
/// this is the struct DefId for structs, and the variant DefId for variants.
|
||||
/// This is the struct `DefId` for structs, and the variant `DefId` for variants.
|
||||
///
|
||||
/// Note that we *could* use the constructor DefId, because the constructor attributes
|
||||
/// Note that we *could* use the constructor `DefId`, because the constructor attributes
|
||||
/// redirect to the base attributes, but compiling a small crate requires
|
||||
/// loading the AdtDefs for all the structs in the universe (e.g., coherence for any
|
||||
/// loading the `AdtDef`s for all the structs in the universe (e.g., coherence for any
|
||||
/// built-in trait), and we do not want to load attributes twice.
|
||||
///
|
||||
/// If someone speeds up attribute loading to not be a performance concern, they can
|
||||
/// remove this hack and use the constructor DefId everywhere.
|
||||
/// remove this hack and use the constructor `DefId` everywhere.
|
||||
pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
did: DefId,
|
||||
ident: Ident,
|
||||
|
@ -2049,13 +2050,13 @@ impl ReprOptions {
|
|||
}
|
||||
|
||||
/// Returns `true` if this `#[repr()]` should inhibit struct field reordering
|
||||
/// optimizations, such as with repr(C), repr(packed(1)), or repr(<int>).
|
||||
/// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
|
||||
pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
|
||||
self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 ||
|
||||
self.int.is_some()
|
||||
}
|
||||
|
||||
/// Returns true if this `#[repr()]` should inhibit union abi optimisations
|
||||
/// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
|
||||
pub fn inhibit_union_abi_opt(&self) -> bool {
|
||||
self.c()
|
||||
}
|
||||
|
@ -2170,14 +2171,14 @@ impl<'a, 'gcx, 'tcx> AdtDef {
|
|||
self.flags.contains(AdtFlags::HAS_CTOR)
|
||||
}
|
||||
|
||||
/// Returns whether this type is `#[fundamental]` for the purposes
|
||||
/// Returns `true` if this type is `#[fundamental]` for the purposes
|
||||
/// of coherence checking.
|
||||
#[inline]
|
||||
pub fn is_fundamental(&self) -> bool {
|
||||
self.flags.contains(AdtFlags::IS_FUNDAMENTAL)
|
||||
}
|
||||
|
||||
/// Returns `true` if this is PhantomData<T>.
|
||||
/// Returns `true` if this is `PhantomData<T>`.
|
||||
#[inline]
|
||||
pub fn is_phantom_data(&self) -> bool {
|
||||
self.flags.contains(AdtFlags::IS_PHANTOM_DATA)
|
||||
|
@ -2199,7 +2200,7 @@ impl<'a, 'gcx, 'tcx> AdtDef {
|
|||
self.flags.contains(AdtFlags::IS_BOX)
|
||||
}
|
||||
|
||||
/// Returns whether this type has a destructor.
|
||||
/// Returns `true` if this type has a destructor.
|
||||
pub fn has_dtor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
|
||||
self.destructor(tcx).is_some()
|
||||
}
|
||||
|
@ -2320,7 +2321,7 @@ impl<'a, 'gcx, 'tcx> AdtDef {
|
|||
})
|
||||
}
|
||||
|
||||
/// Compute the discriminant value used by a specific variant.
|
||||
/// Computes the discriminant value used by a specific variant.
|
||||
/// Unlike `discriminants`, this is (amortized) constant-time,
|
||||
/// only doing at most one query for evaluating an explicit
|
||||
/// discriminant (the last one before the requested variant),
|
||||
|
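The "nearest explicit discriminant plus offset" idea can be sketched over a plain list of optional explicit values (illustrative only, not the query-based implementation):

```rust
fn discriminant_for_variant(explicit: &[Option<i128>], idx: usize) -> i128 {
    // Walk back to the closest explicit discriminant at or before `idx`
    // (at most one "evaluation"), then add the distance to `idx`.
    let (base_idx, base_val) = (0..=idx)
        .rev()
        .find_map(|i| explicit[i].map(|v| (i, v)))
        .unwrap_or((0, 0));
    base_val + (idx - base_idx) as i128
}

fn main() {
    // enum E { A, B = 10, C, D }  =>  discriminants [0, 10, 11, 12]
    let explicit = [None, Some(10), None, None];
    assert_eq!(discriminant_for_variant(&explicit, 0), 0);
    assert_eq!(discriminant_for_variant(&explicit, 3), 12);
}
```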
@ -2336,9 +2337,9 @@ impl<'a, 'gcx, 'tcx> AdtDef {
|
|||
explicit_value.checked_add(tcx, offset as u128).0
|
||||
}
|
||||
|
||||
/// Yields a DefId for the discriminant and an offset to add to it
|
||||
/// Yields a `DefId` for the discriminant and an offset to add to it
|
||||
/// Alternatively, if there is no explicit discriminant, returns the
|
||||
/// inferred discriminant directly
|
||||
/// inferred discriminant directly.
|
||||
pub fn discriminant_def_for_variant(
|
||||
&self,
|
||||
variant_index: VariantIdx,
|
||||
|
@ -2368,15 +2369,15 @@ impl<'a, 'gcx, 'tcx> AdtDef {
|
|||
}
|
||||
|
||||
/// Returns a list of types such that `Self: Sized` if and only
|
||||
/// if that type is Sized, or `TyErr` if this type is recursive.
|
||||
/// if that type is `Sized`, or `TyErr` if this type is recursive.
|
||||
///
|
||||
/// Oddly enough, checking that the sized-constraint is Sized is
|
||||
/// Oddly enough, checking that the sized-constraint is `Sized` is
|
||||
/// actually more expressive than checking all members:
|
||||
/// the Sized trait is inductive, so an associated type that references
|
||||
/// Self would prevent its containing ADT from being Sized.
|
||||
/// the `Sized` trait is inductive, so an associated type that references
|
||||
/// `Self` would prevent its containing ADT from being `Sized`.
|
||||
///
|
||||
/// Due to normalization being eager, this applies even if
|
||||
/// the associated type is behind a pointer, e.g., issue #31299.
|
||||
/// the associated type is behind a pointer (e.g., issue #31299).
|
||||
pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx [Ty<'tcx>] {
|
||||
match tcx.try_adt_sized_constraint(DUMMY_SP, self.did) {
|
||||
Ok(tys) => tys,
|
||||
|
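Loosely, the sized-constraint of a struct reduces to its trailing field; a stand-alone sketch under that assumption:

```rust
struct Wrapper<T: ?Sized> {
    tag: u8,
    payload: T, // the sized-constraint of `Wrapper<T>` boils down to `T`
}

fn needs_sized<T: Sized>() {}

fn main() {
    needs_sized::<Wrapper<u32>>();
    // needs_sized::<Wrapper<[u8]>>(); // error: `[u8]: Sized` does not hold
    let boxed: Box<Wrapper<[u8]>> = Box::new(Wrapper { tag: 1, payload: [1u8, 2, 3] });
    println!("{} bytes of payload", boxed.payload.len());
}
```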
@ -2480,7 +2481,7 @@ impl<'a, 'gcx, 'tcx> FieldDef {
|
|||
}
|
||||
}
|
||||
|
||||
/// Represents the various closure traits in the Rust language. This
|
||||
/// Represents the various closure traits in the language. This
|
||||
/// will determine the type of the environment (`self`, in the
|
||||
/// desugaring) argument that the closure expects.
|
||||
///
|
||||
|
@ -2560,7 +2561,7 @@ impl<'tcx> TyS<'tcx> {
|
|||
}
|
||||
|
||||
/// Walks `ty` and any types appearing within `ty`, invoking the
|
||||
/// callback `f` on each type. If the callback returns false, then the
|
||||
/// callback `f` on each type. If the callback returns `false`, then the
|
||||
/// children of the current type are ignored.
|
||||
///
|
||||
/// Note: prefer `ty.walk()` where possible.
|
||||
|
@ -2670,7 +2671,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
self.typeck_tables_of(self.hir().body_owner_def_id(body))
|
||||
}
|
||||
|
||||
/// Returns an iterator of the def-ids for all body-owners in this
|
||||
/// Returns an iterator of the `DefId`s for all body-owners in this
|
||||
/// crate. If you would prefer to iterate over the bodies
|
||||
/// themselves, you can do `self.hir().krate().body_ids.iter()`.
|
||||
pub fn body_owners(
|
||||
|
@ -2917,7 +2918,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Return the possibly-auto-generated MIR of a (DefId, Subst) pair.
|
||||
/// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair.
|
||||
pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>)
|
||||
-> &'gcx Mir<'gcx>
|
||||
{
|
||||
|
@ -2937,7 +2938,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the attributes of a definition.
|
||||
/// Gets the attributes of a definition.
|
||||
pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> {
|
||||
if let Some(id) = self.hir().as_local_hir_id(did) {
|
||||
Attributes::Borrowed(self.hir().attrs_by_hir_id(id))
|
||||
|
@ -2946,7 +2947,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Determine whether an item is annotated with an attribute.
|
||||
/// Determines whether an item is annotated with an attribute.
|
||||
pub fn has_attr(self, did: DefId, attr: &str) -> bool {
|
||||
attr::contains_name(&self.get_attrs(did), attr)
|
||||
}
|
||||
|
@ -2960,14 +2961,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
self.optimized_mir(def_id).generator_layout.as_ref().unwrap()
|
||||
}
|
||||
|
||||
/// Given the def-id of an impl, return the def_id of the trait it implements.
|
||||
/// If it implements no trait, return `None`.
|
||||
/// Given the `DefId` of an impl, returns the `DefId` of the trait it implements.
|
||||
/// If it implements no trait, returns `None`.
|
||||
pub fn trait_id_of_impl(self, def_id: DefId) -> Option<DefId> {
|
||||
self.impl_trait_ref(def_id).map(|tr| tr.def_id)
|
||||
}
|
||||
|
||||
/// If the given defid describes a method belonging to an impl, return the
|
||||
/// def-id of the impl that the method belongs to. Otherwise, return `None`.
|
||||
/// If the given defid describes a method belonging to an impl, returns the
|
||||
/// `DefId` of the impl that the method belongs to; otherwise, returns `None`.
|
||||
pub fn impl_of_method(self, def_id: DefId) -> Option<DefId> {
|
||||
let item = if def_id.krate != LOCAL_CRATE {
|
||||
if let Some(Def::Method(_)) = self.describe_def(def_id) {
|
||||
|
@ -2998,9 +2999,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
// Hygienically compare a use-site name (`use_name`) for a field or an associated item with its
|
||||
// supposed definition name (`def_name`). The method also needs `DefId` of the supposed
|
||||
// definition's parent/scope to perform comparison.
|
||||
/// Hygienically compares a use-site name (`use_name`) for a field or an associated item with
|
||||
/// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed
|
||||
/// definition's parent/scope to perform comparison.
|
||||
pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
|
||||
self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern()
|
||||
}
|
||||
|
@ -3082,7 +3083,7 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Asso
|
|||
parent_item.node)
|
||||
}
|
||||
|
||||
/// Calculates the Sized-constraint.
|
||||
/// Calculates the `Sized` constraint.
|
||||
///
|
||||
/// In fact, there are only a few options for the types in the constraint:
|
||||
/// - an obviously-unsized type
|
||||
|
@ -3135,9 +3136,9 @@ fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span {
|
|||
tcx.hir().span_if_local(def_id).unwrap()
|
||||
}
|
||||
|
||||
/// If the given def ID describes an item belonging to a trait,
|
||||
/// return the ID of the trait that the trait item belongs to.
|
||||
/// Otherwise, return `None`.
|
||||
/// If the given `DefId` describes an item belonging to a trait,
|
||||
/// returns the `DefId` of the trait that the trait item belongs to;
|
||||
/// otherwise, returns `None`.
|
||||
fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
|
||||
tcx.opt_associated_item(def_id)
|
||||
.and_then(|associated_item| {
|
||||
|
@ -3232,10 +3233,9 @@ fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|||
}
|
||||
}
|
||||
|
||||
/// If `def_id` is an issue 33140 hack impl, return its self type. Otherwise
|
||||
/// return None.
|
||||
/// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`.
|
||||
///
|
||||
/// See ImplOverlapKind::Issue33140 for more details.
|
||||
/// See [`ImplOverlapKind::Issue33140`] for more details.
|
||||
fn issue33140_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
def_id: DefId)
|
||||
-> Option<Ty<'tcx>>
|
||||
|
|
|
@ -31,37 +31,38 @@ use {
|
|||
rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, HashStable},
|
||||
};
|
||||
|
||||
/// Indicates the state of a query for a given key in a query map
|
||||
/// Indicates the state of a query for a given key in a query map.
|
||||
pub(super) enum QueryResult<'tcx> {
|
||||
/// An already executing query. The query job can be used to await for its completion
|
||||
/// An already executing query. The query job can be used to await for its completion.
|
||||
Started(Lrc<QueryJob<'tcx>>),
|
||||
|
||||
/// The query panicked. Queries trying to wait on this will raise a fatal error / silently panic
|
||||
/// The query panicked. Queries trying to wait on this will raise a fatal error or
|
||||
/// silently panic.
|
||||
Poisoned,
|
||||
}
|
||||
|
||||
/// A span and a query key
|
||||
/// Represents a span and a query key.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct QueryInfo<'tcx> {
|
||||
/// The span for a reason this query was required
|
||||
/// The span corresponding to the reason for which this query was required.
|
||||
pub span: Span,
|
||||
pub query: Query<'tcx>,
|
||||
}
|
||||
|
||||
/// A object representing an active query job.
|
||||
/// Represents an active query job.
|
||||
pub struct QueryJob<'tcx> {
|
||||
pub info: QueryInfo<'tcx>,
|
||||
|
||||
/// The parent query job which created this job and is implicitly waiting on it.
|
||||
pub parent: Option<Lrc<QueryJob<'tcx>>>,
|
||||
|
||||
/// The latch which is used to wait on this job
|
||||
/// The latch that is used to wait on this job.
|
||||
#[cfg(parallel_compiler)]
|
||||
latch: QueryLatch<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx> QueryJob<'tcx> {
|
||||
/// Creates a new query job
|
||||
/// Creates a new query job.
|
||||
pub fn new(info: QueryInfo<'tcx>, parent: Option<Lrc<QueryJob<'tcx>>>) -> Self {
|
||||
QueryJob {
|
||||
info,
|
||||
|
@ -230,7 +231,7 @@ impl<'tcx> QueryLatch<'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Remove a single waiter from the list of waiters.
|
||||
/// Removes a single waiter from the list of waiters.
|
||||
/// This is used to break query cycles.
|
||||
fn extract_waiter(
|
||||
&self,
|
||||
|
|
|
@ -102,12 +102,12 @@ define_queries! { <'tcx>
/// Records the type of every item.
[] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>,

/// Maps from the def-id of an item (trait/struct/enum/fn) to its
/// Maps from the `DefId` of an item (trait/struct/enum/fn) to its
/// associated generics.
[] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics,

/// Maps from the def-id of an item (trait/struct/enum/fn) to the
/// predicates (where clauses) that must be proven true in order
/// Maps from the `DefId` of an item (trait/struct/enum/fn) to the
/// predicates (where-clauses) that must be proven true in order
/// to reference it. This is almost always the "predicates query"
/// that you want.
///

@ -123,8 +123,8 @@ define_queries! { <'tcx>
/// user.)
[] fn predicates_of: PredicatesOfItem(DefId) -> Lrc<ty::GenericPredicates<'tcx>>,

/// Maps from the def-id of an item (trait/struct/enum/fn) to the
/// predicates (where clauses) directly defined on it. This is
/// Maps from the `DefId` of an item (trait/struct/enum/fn) to the
/// predicates (where-clauses) directly defined on it. This is
/// equal to the `explicit_predicates_of` predicates plus the
/// `inferred_outlives_of` predicates.
[] fn predicates_defined_on: PredicatesDefinedOnItem(DefId)

@ -138,7 +138,7 @@ define_queries! { <'tcx>
/// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`).
[] fn inferred_outlives_of: InferredOutlivesOf(DefId) -> Lrc<Vec<ty::Predicate<'tcx>>>,

/// Maps from the def-id of a trait to the list of
/// Maps from the `DefId` of a trait to the list of
/// super-predicates. This is a subset of the full list of
/// predicates. We store these in a separate map because we must
/// evaluate them even during type conversion, often before the

@ -216,7 +216,7 @@ define_queries! { <'tcx>
},

Codegen {
/// Set of all the def-ids in this crate that have MIR associated with
/// Set of all the `DefId`s in this crate that have MIR associated with
/// them. This includes all the body owners, but also things like struct
/// constructors.
[] fn mir_keys: mir_keys(CrateNum) -> Lrc<DefIdSet>,

@ -226,11 +226,11 @@ define_queries! { <'tcx>
/// the value isn't known except to the pass itself.
[] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Lrc<BitSet<mir::Local>>),

/// Fetch the MIR for a given def-id right after it's built - this includes
/// Fetch the MIR for a given `DefId` right after it's built - this includes
/// unreachable code.
[] fn mir_built: MirBuilt(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,

/// Fetch the MIR for a given def-id up till the point where it is
/// Fetch the MIR for a given `DefId` up till the point where it is
/// ready for const evaluation.
///
/// See the README for the `mir` module for details.

@ -244,7 +244,7 @@ define_queries! { <'tcx>
},

TypeChecking {
/// The result of unsafety-checking this def-id.
/// The result of unsafety-checking this `DefId`.
[] fn unsafety_check_result: UnsafetyCheckResult(DefId) -> mir::UnsafetyCheckResult,

/// HACK: when evaluated, this reports a "unsafe derive on repr(packed)" error

@ -307,13 +307,13 @@ define_queries! { <'tcx>
TypeChecking {
/// Gets a complete map from all types to their inherent impls.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
/// (Defined only for `LOCAL_CRATE`.)
[] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum)
-> Lrc<CrateInherentImpls>,

/// Checks all types in the krate for overlap in their inherent impls. Reports errors.
/// Checks all types in the crate for overlap in their inherent impls. Reports errors.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
/// (Defined only for `LOCAL_CRATE`.)
[] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum)
-> (),
},

@ -321,9 +321,9 @@ define_queries! { <'tcx>
Other {
/// Evaluate a constant without running sanity checks
///
/// DO NOT USE THIS outside const eval. Const eval uses this to break query cycles during
/// validation. Please add a comment to every use site explaining why using `const_eval`
/// isn't sufficient
/// **Do not use this** outside const eval. Const eval uses this to break query cycles
/// during validation. Please add a comment to every use site explaining why using
/// `const_eval` isn't sufficient
[] fn const_eval_raw: const_eval_raw_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
-> ConstEvalRawResult<'tcx>,

@ -344,7 +344,7 @@ define_queries! { <'tcx>
Other {
[] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet,

/// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body;
/// Per-body `region::ScopeTree`. The `DefId` should be the owner `DefId` for the body;
/// in the case of closures, this will be redirected to the enclosing function.
[] fn region_scope_tree: RegionScopeTree(DefId) -> Lrc<region::ScopeTree>,

@ -398,7 +398,7 @@ define_queries! { <'tcx>
-> Lrc<specialization_graph::Graph>,
[] fn is_object_safe: ObjectSafety(DefId) -> bool,

/// Get the ParameterEnvironment for a given item; this environment
/// Gets the ParameterEnvironment for a given item; this environment
/// will be in "user-facing" mode, meaning that it is suitabe for
/// type-checking etc, and it does not normalize specializable
/// associated types. This is almost always what you want,

@ -485,7 +485,7 @@ define_queries! { <'tcx>
[] fn foreign_modules: ForeignModules(CrateNum) -> Lrc<Vec<ForeignModule>>,

/// Identifies the entry-point (e.g. the `main` function) for a given
/// Identifies the entry-point (e.g., the `main` function) for a given
/// crate, returning `None` if there is no entry point (such as for library crates).
[] fn entry_fn: EntryFn(CrateNum) -> Option<(DefId, EntryFnType)>,
[] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>,
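The query definitions above all follow the same shape: a key (usually a `DefId` or `CrateNum`) maps to a result that a provider computes once and the context then caches. As a rough illustration of that shape only -- not the real `define_queries!` expansion -- here is a minimal, self-contained Rust sketch of a context with one memoized `DefId`-keyed query; the `Ctx`, `DefId`, and `type_of` names are placeholders invented for this example.

    use std::cell::RefCell;
    use std::collections::HashMap;

    // Hypothetical stand-ins for the real rustc types.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct DefId(u32);

    struct Ctx {
        // Write-once cache: each key is computed at most once.
        type_of_cache: RefCell<HashMap<DefId, String>>,
    }

    impl Ctx {
        // Plays the role of a generated query method: check the cache,
        // otherwise run the provider and memoize its result.
        fn type_of(&self, key: DefId) -> String {
            if let Some(v) = self.type_of_cache.borrow().get(&key) {
                return v.clone();
            }
            let value = Self::type_of_provider(key);
            self.type_of_cache.borrow_mut().insert(key, value.clone());
            value
        }

        // Plays the role of the provider supplied for the query.
        fn type_of_provider(key: DefId) -> String {
            format!("Ty(#{})", key.0)
        }
    }

    fn main() {
        let ctx = Ctx { type_of_cache: RefCell::new(HashMap::new()) };
        assert_eq!(ctx.type_of(DefId(7)), "Ty(#7)");
        // The second call hits the cache rather than re-running the provider.
        assert_eq!(ctx.type_of(DefId(7)), "Ty(#7)");
    }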
@ -103,7 +103,7 @@ impl AbsoluteBytePos {
}

impl<'sess> OnDiskCache<'sess> {
/// Create a new OnDiskCache instance from the serialized data in `data`.
/// Creates a new OnDiskCache instance from the serialized data in `data`.
pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> OnDiskCache<'sess> {
debug_assert!(sess.opts.incremental.is_some());

@ -325,7 +325,7 @@ impl<'sess> OnDiskCache<'sess> {
})
}

/// Load a diagnostic emitted during the previous compilation session.
/// Loads a diagnostic emitted during the previous compilation session.
pub fn load_diagnostics<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
dep_node_index: SerializedDepNodeIndex)

@ -339,7 +339,7 @@ impl<'sess> OnDiskCache<'sess> {
diagnostics.unwrap_or_default()
}

/// Store a diagnostic emitted during the current compilation session.
/// Stores a diagnostic emitted during the current compilation session.
/// Anything stored like this will be available via `load_diagnostics` in
/// the next compilation session.
#[inline(never)]

@ -353,7 +353,7 @@ impl<'sess> OnDiskCache<'sess> {
}

/// Returns the cached query result if there is something in the cache for
/// the given SerializedDepNodeIndex. Otherwise returns None.
/// the given `SerializedDepNodeIndex`; otherwise returns `None`.
pub fn try_load_query_result<'tcx, T>(&self,
tcx: TyCtxt<'_, 'tcx, 'tcx>,
dep_node_index: SerializedDepNodeIndex)

@ -366,7 +366,7 @@ impl<'sess> OnDiskCache<'sess> {
"query result")
}

/// Store a diagnostic emitted during computation of an anonymous query.
/// Stores a diagnostic emitted during computation of an anonymous query.
/// Since many anonymous queries can share the same `DepNode`, we aggregate
/// them -- as opposed to regular queries where we assume that there is a
/// 1:1 relationship between query-key and `DepNode`.
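Since several anonymous queries can share one `DepNode`, their diagnostics are aggregated per dep node rather than stored with a 1:1 key/value relationship. A minimal sketch of that aggregation idea follows; it is not the actual `OnDiskCache` implementation, and the index and diagnostic types are simplified placeholders.

    use std::collections::HashMap;
    use std::sync::Mutex;

    // Simplified placeholder for a dep-node index.
    type DepNodeIndex = u32;

    #[derive(Default)]
    struct DiagnosticStore {
        // Several anonymous queries may map to the same dep node, so we
        // append to a Vec instead of assuming one diagnostic set per key.
        current: Mutex<HashMap<DepNodeIndex, Vec<String>>>,
    }

    impl DiagnosticStore {
        fn store(&self, node: DepNodeIndex, diags: Vec<String>) {
            self.current.lock().unwrap().entry(node).or_default().extend(diags);
        }

        fn load(&self, node: DepNodeIndex) -> Vec<String> {
            self.current.lock().unwrap().get(&node).cloned().unwrap_or_default()
        }
    }

    fn main() {
        let store = DiagnosticStore::default();
        store.store(3, vec!["unused variable `x`".to_string()]);
        store.store(3, vec!["unreachable expression".to_string()]);
        assert_eq!(store.load(3).len(), 2);
        assert!(store.load(4).is_empty());
    }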
@ -1,6 +1,6 @@
//! The implementation of the query system itself. Defines the macros
//! that generate the actual methods on tcx which find and execute the
//! provider, manage the caches, and so forth.
//! The implementation of the query system itself. This defines the macros that
//! generate the actual methods on tcx which find and execute the provider,
//! manage the caches, and so forth.

use crate::dep_graph::{DepNodeIndex, DepNode, DepKind, SerializedDepNodeIndex};
use crate::errors::DiagnosticBuilder;

@ -1017,8 +1017,8 @@ macro_rules! define_queries_inner {
}

impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> {
/// Return a transparent wrapper for `TyCtxt` which ensures queries
/// are executed instead of returing their result
/// Returns a transparent wrapper for `TyCtxt`, which ensures queries
/// are executed instead of just returning their results.
#[inline(always)]
pub fn ensure(self) -> TyCtxtEnsure<'a, $tcx, 'lcx> {
TyCtxtEnsure {

@ -1026,7 +1026,7 @@ macro_rules! define_queries_inner {
}
}

/// Return a transparent wrapper for `TyCtxt` which uses
/// Returns a transparent wrapper for `TyCtxt` which uses
/// `span` as the location of queries performed through it.
#[inline(always)]
pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> {
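The `ensure()` wrapper is for callers that want a query run purely for its side effects (reporting errors, populating caches) and do not need the result. A rough, standalone sketch of that pattern follows; the real `TyCtxtEnsure` is generated by the macro above, and `Ctx` and `check_crate` here are invented for illustration.

    struct Ctx;

    // Transparent wrapper: same context, but query calls made through it
    // are executed only for their side effects and the results are discarded.
    struct CtxEnsure<'a>(&'a Ctx);

    impl Ctx {
        fn ensure(&self) -> CtxEnsure<'_> {
            CtxEnsure(self)
        }

        // A query whose result the caller may or may not care about.
        fn check_crate(&self) -> Result<(), String> {
            println!("running checks (and emitting any errors)...");
            Ok(())
        }
    }

    impl<'a> CtxEnsure<'a> {
        // Mirrors the query's signature but returns `()`.
        fn check_crate(&self) {
            let _ = self.0.check_crate();
        }
    }

    fn main() {
        let ctx = Ctx;
        // Run the query only to make sure its side effects happen.
        ctx.ensure().check_crate();
        // Or run it normally and use the result.
        let _result = ctx.check_crate();
    }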
@ -1067,7 +1067,7 @@ macro_rules! define_queries_struct {
(tcx: $tcx:tt,
input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
pub struct Queries<$tcx> {
/// This provides access to the incr. comp. on-disk cache for query results.
/// This provides access to the incremental compilation on-disk cache for query results.
/// Do not access this directly. It is only meant to be used by
/// `DepGraph::try_mark_green()` and the query infrastructure.
pub(crate) on_disk_cache: OnDiskCache<'tcx>,

@ -1123,22 +1123,22 @@ macro_rules! define_provider_struct {
///
/// Now, if force_from_dep_node() would always fail, it would be pretty useless.
/// Fortunately, we can use some contextual information that will allow us to
/// reconstruct query-keys for certain kinds of DepNodes. In particular, we
/// enforce by construction that the GUID/fingerprint of certain DepNodes is a
/// valid DefPathHash. Since we also always build a huge table that maps every
/// DefPathHash in the current codebase to the corresponding DefId, we have
/// reconstruct query-keys for certain kinds of `DepNode`s. In particular, we
/// enforce by construction that the GUID/fingerprint of certain `DepNode`s is a
/// valid `DefPathHash`. Since we also always build a huge table that maps every
/// `DefPathHash` in the current codebase to the corresponding `DefId`, we have
/// everything we need to re-run the query.
///
/// Take the `mir_validated` query as an example. Like many other queries, it
/// just has a single parameter: the DefId of the item it will compute the
/// validated MIR for. Now, when we call `force_from_dep_node()` on a dep-node
/// with kind `MirValidated`, we know that the GUID/fingerprint of the dep-node
/// is actually a DefPathHash, and can therefore just look up the corresponding
/// DefId in `tcx.def_path_hash_to_def_id`.
/// just has a single parameter: the `DefId` of the item it will compute the
/// validated MIR for. Now, when we call `force_from_dep_node()` on a `DepNode`
/// with kind `MirValidated`, we know that the GUID/fingerprint of the `DepNode`
/// is actually a `DefPathHash`, and can therefore just look up the corresponding
/// `DefId` in `tcx.def_path_hash_to_def_id`.
///
/// When you implement a new query, it will likely have a corresponding new
/// DepKind, and you'll have to support it here in `force_from_dep_node()`. As
/// a rule of thumb, if your query takes a DefId or DefIndex as sole parameter,
/// `DepKind`, and you'll have to support it here in `force_from_dep_node()`. As
/// a rule of thumb, if your query takes a `DefId` or `DefIndex` as sole parameter,
/// then `force_from_dep_node()` should not fail for it. Otherwise, you can just
/// add it to the "We don't have enough information to reconstruct..." group in
/// the match below.
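The recovery trick described above boils down to: for dep nodes whose fingerprint is known to be a `DefPathHash`, look the hash up in a side table to recover the `DefId`, then re-run the query with that key. Below is a simplified, self-contained sketch of that lookup; every type in it is a toy stand-in for the corresponding rustc definition, not the real thing.

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct DefPathHash(u64);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct DefId(u32);

    #[derive(Clone, Copy, Debug)]
    enum DepKind {
        // The fingerprint of these nodes is a DefPathHash by construction.
        MirValidated,
        // For these, we cannot reconstruct the query key.
        TraitSelect,
    }

    struct DepNode {
        kind: DepKind,
        fingerprint: DefPathHash,
    }

    fn force_from_dep_node(
        def_path_hash_to_def_id: &HashMap<DefPathHash, DefId>,
        node: &DepNode,
    ) -> bool {
        match node.kind {
            DepKind::MirValidated => {
                // Reconstruct the query key from the fingerprint and
                // (conceptually) re-run the query with it.
                if let Some(def_id) = def_path_hash_to_def_id.get(&node.fingerprint) {
                    println!("re-running mir_validated({:?})", def_id);
                    true
                } else {
                    false
                }
            }
            // We don't have enough information to reconstruct the key.
            DepKind::TraitSelect => false,
        }
    }

    fn main() {
        let mut table = HashMap::new();
        table.insert(DefPathHash(0xABCD), DefId(42));
        let node = DepNode { kind: DepKind::MirValidated, fingerprint: DefPathHash(0xABCD) };
        assert!(force_from_dep_node(&table, &node));
    }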
@ -30,7 +30,7 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized {
/// Returns a static string we can use for printouts.
fn tag(&self) -> &'static str;

/// Returns true if the value `a` is the "expected" type in the
/// Returns `true` if the value `a` is the "expected" type in the
/// relation. Just affects error messages.
fn a_is_expected(&self) -> bool;
@ -12,14 +12,14 @@ use rustc_data_structures::sync::{RwLock, ReadGuard, MappedReadGuard};
/// Steal<Mir<'tcx>>` (to be very specific). Now we can read from this
/// as much as we want (using `borrow()`), but you can also
/// `steal()`. Once you steal, any further attempt to read will panic.
/// Therefore we know that -- assuming no ICE -- nobody is observing
/// Therefore, we know that -- assuming no ICE -- nobody is observing
/// the fact that the MIR was updated.
///
/// Obviously, whenever you have a query that yields a `Steal` value,
/// you must treat it with caution, and make sure that you know that
/// -- once the value is stolen -- it will never be read from again.
///
/// FIXME(#41710) -- what is the best way to model linear queries?
//
// FIXME(#41710): what is the best way to model linear queries?
pub struct Steal<T> {
value: RwLock<Option<T>>
}
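The semantics described above -- read freely until someone steals, then panic on any further read -- can be modelled with a plain `RwLock<Option<T>>`. Here is a minimal sketch; the real `Steal` hands out mapped read guards via `borrow()`, while this toy version uses a closure-based accessor to stay dependency-free.

    use std::sync::RwLock;

    pub struct Steal<T> {
        value: RwLock<Option<T>>,
    }

    impl<T> Steal<T> {
        pub fn new(value: T) -> Self {
            Steal { value: RwLock::new(Some(value)) }
        }

        // Read access; panics if the value has already been stolen.
        pub fn with<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            let guard = self.value.read().unwrap();
            f(guard.as_ref().expect("attempted to read from stolen value"))
        }

        // Take the value out; any later read will panic.
        pub fn steal(&self) -> T {
            self.value.write().unwrap().take().expect("value already stolen")
        }
    }

    fn main() {
        let mir = Steal::new(vec![1, 2, 3]);
        let len = mir.with(|v| v.len()); // fine: not stolen yet
        assert_eq!(len, 3);
        let owned = mir.steal();
        assert_eq!(owned, vec![1, 2, 3]);
        // mir.with(|v| v.len()); // would panic: the value was stolen
    }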
@ -47,7 +47,7 @@ pub enum BoundRegion {
/// Named region parameters for functions (a in &'a T)
///
/// The def-id is needed to distinguish free regions in
/// The `DefId` is needed to distinguish free regions in
/// the event of shadowing.
BrNamed(DefId, InternedString),

@ -442,17 +442,17 @@ impl<'tcx> GeneratorSubsts<'tcx> {
self.split(def_id, tcx).return_ty
}

/// Return the "generator signature", which consists of its yield
/// Returns the "generator signature", which consists of its yield
/// and return types.
///
/// NB. Some bits of the code prefers to see this wrapped in a
/// N.B., some bits of the code prefers to see this wrapped in a
/// binder, but it never contains bound regions. Probably this
/// function should be removed.
pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> PolyGenSig<'tcx> {
ty::Binder::dummy(self.sig(def_id, tcx))
}

/// Return the "generator signature", which consists of its yield
/// Returns the "generator signature", which consists of its yield
/// and return types.
pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> GenSig<'tcx> {
ty::GenSig {

@ -520,11 +520,11 @@ impl<'tcx> UpvarSubsts<'tcx> {

#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum ExistentialPredicate<'tcx> {
/// e.g., Iterator
/// E.g., `Iterator`.
Trait(ExistentialTraitRef<'tcx>),
/// e.g., Iterator::Item = T
/// E.g., `Iterator::Item = T`.
Projection(ExistentialProjection<'tcx>),
/// e.g., Send
/// E.g., `Send`.
AutoTrait(DefId),
}
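To make the three variants concrete: a trait object type such as `dyn Iterator<Item = u32> + Send` is represented as a list of existential predicates -- a principal trait, its associated-type bindings, and any auto traits. The toy decomposition below only illustrates that split; its types are simplified placeholders rather than the real `ExistentialTraitRef` and `ExistentialProjection`.

    // Simplified stand-ins for the real rustc types.
    #[derive(Debug)]
    enum ExistentialPredicate {
        // The principal trait, e.g. `Iterator`.
        Trait { trait_name: &'static str },
        // An associated type binding, e.g. `Iterator::Item = u32`.
        Projection { trait_name: &'static str, assoc: &'static str, ty: &'static str },
        // An auto trait, e.g. `Send`.
        AutoTrait { trait_name: &'static str },
    }

    fn main() {
        // Roughly how `dyn Iterator<Item = u32> + Send` decomposes.
        let dyn_ty = vec![
            ExistentialPredicate::Trait { trait_name: "Iterator" },
            ExistentialPredicate::Projection { trait_name: "Iterator", assoc: "Item", ty: "u32" },
            ExistentialPredicate::AutoTrait { trait_name: "Send" },
        ];
        for pred in &dyn_ty {
            println!("{:?}", pred);
        }
    }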
@ -655,12 +655,12 @@ impl<'tcx> Binder<&'tcx List<ExistentialPredicate<'tcx>>> {
}

/// A complete reference to a trait. These take numerous guises in syntax,
/// but perhaps the most recognizable form is in a where clause:
/// but perhaps the most recognizable form is in a where-clause:
///
/// T: Foo<U>
///
/// This would be represented by a trait-reference where the def-id is the
/// def-id for the trait `Foo` and the substs define `T` as parameter 0,
/// This would be represented by a trait-reference where the `DefId` is the
/// `DefId` for the trait `Foo` and the substs define `T` as parameter 0,
/// and `U` as parameter 1.
///
/// Trait references also appear in object types like `Foo<U>`, but in

@ -766,9 +766,9 @@ impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> {
}
}

/// Object types don't have a self-type specified. Therefore, when
/// Object types don't have a self type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self-type. A common choice is `mk_err()`
/// you must give *some* self type. A common choice is `mk_err()`
/// or some placeholder type.
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
-> ty::TraitRef<'tcx> {

@ -789,9 +789,9 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> {
self.skip_binder().def_id
}

/// Object types don't have a self-type specified. Therefore, when
/// Object types don't have a self type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self-type. A common choice is `mk_err()`
/// you must give *some* self type. A common choice is `mk_err()`
/// or some placeholder type.
pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
self_ty: Ty<'tcx>)

@ -829,7 +829,7 @@ impl<T> Binder<T> {

/// Skips the binder and returns the "bound" value. This is a
/// risky thing to do because it's easy to get confused about
/// debruijn indices and the like. It is usually better to
/// De Bruijn indices and the like. It is usually better to
/// discharge the binder using `no_bound_vars` or
/// `replace_late_bound_regions` or something like
/// that. `skip_binder` is only valid when you are either

@ -840,7 +840,7 @@ impl<T> Binder<T> {
///
/// Some examples where `skip_binder` is reasonable:
///
/// - extracting the def-id from a PolyTraitRef;
/// - extracting the `DefId` from a PolyTraitRef;
/// - comparing the self type of a PolyTraitRef to see if it is equal to
/// a type parameter `X`, since the type `X` does not reference any regions
pub fn skip_binder(&self) -> &T {

@ -884,8 +884,8 @@ impl<T> Binder<T> {
}

/// Given two things that have the same binder level,
/// and an operation that wraps on their contents, execute the operation
/// and then wrap its result.
/// and an operation that wraps on their contents, executes the operation
/// and then wraps its result.
///
/// `f` should consider bound regions at depth 1 to be free, and
/// anything it produces with bound regions at depth 1 will be

@ -896,7 +896,7 @@ impl<T> Binder<T> {
Binder(f(self.0, u.0))
}

/// Split the contents into two things that share the same binder
/// Splits the contents into two things that share the same binder
/// level as the original, returning two distinct binders.
///
/// `f` should consider bound regions at depth 1 to be free, and
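The `fuse` and `split` documented above are structure-preserving maps over a binder's contents: the closure runs on the bound values and the result is re-wrapped at the same binder level. A generic, standalone sketch of those two combinators follows; the real `Binder` also tracks bound variables, which this toy version ignores.

    #[derive(Debug, PartialEq)]
    struct Binder<T>(T);

    impl<T> Binder<T> {
        // Combine two values at the same binder level and re-wrap the result.
        fn fuse<U, R>(self, other: Binder<U>, f: impl FnOnce(T, U) -> R) -> Binder<R> {
            Binder(f(self.0, other.0))
        }

        // Split one bound value into two, each keeping the original binder level.
        fn split<U, V>(self, f: impl FnOnce(T) -> (U, V)) -> (Binder<U>, Binder<V>) {
            let (u, v) = f(self.0);
            (Binder(u), Binder(v))
        }
    }

    fn main() {
        let a = Binder(2);
        let b = Binder(3);
        assert_eq!(a.fuse(b, |x, y| x + y), Binder(5));

        let pair = Binder((1, "one"));
        let (num, name) = pair.split(|(n, s)| (n, s));
        assert_eq!(num, Binder(1));
        assert_eq!(name, Binder("one"));
    }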
@ -1118,14 +1118,14 @@ pub type Region<'tcx> = &'tcx RegionKind;
/// ## Bound Regions
///
/// These are regions that are stored behind a binder and must be substituted
/// with some concrete region before being used. There are 2 kind of
/// bound regions: early-bound, which are bound in an item's Generics,
/// and are substituted by a Substs, and late-bound, which are part of
/// higher-ranked types (e.g., `for<'a> fn(&'a ())`) and are substituted by
/// with some concrete region before being used. There are two kind of
/// bound regions: early-bound, which are bound in an item's `Generics`,
/// and are substituted by a `Substs`, and late-bound, which are part of
/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by
/// the likes of `liberate_late_bound_regions`. The distinction exists
/// because higher-ranked lifetimes aren't supported in all places. See [1][2].
///
/// Unlike Param-s, bound regions are not supposed to exist "in the wild"
/// Unlike `Param`s, bound regions are not supposed to exist "in the wild"
/// outside their binder, e.g., in types passed to type inference, and
/// should first be substituted (by placeholder regions, free regions,
/// or region variables).

@ -1141,7 +1141,7 @@ pub type Region<'tcx> = &'tcx RegionKind;
/// To do this, we replace the bound regions with placeholder markers,
/// which don't satisfy any relation not explicitly provided.
///
/// There are 2 kinds of placeholder regions in rustc: `ReFree` and
/// There are two kinds of placeholder regions in rustc: `ReFree` and
/// `RePlaceholder`. When checking an item's body, `ReFree` is supposed
/// to be used. These also support explicit bounds: both the internally-stored
/// *scope*, which the region is assumed to outlive, as well as other

@ -1346,11 +1346,11 @@ impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> {

impl DebruijnIndex {
/// Returns the resulting index when this value is moved into
/// `amount` number of new binders. So e.g., if you had
/// `amount` number of new binders. So, e.g., if you had
///
/// for<'a> fn(&'a x)
///
/// and you wanted to change to
/// and you wanted to change it to
///
/// for<'a> fn(for<'b> fn(&'a x))
///

@ -1378,7 +1378,7 @@ impl DebruijnIndex {
*self = self.shifted_out(amount);
}

/// Adjusts any Debruijn Indices so as to make `to_binder` the
/// Adjusts any De Bruijn indices so as to make `to_binder` the
/// innermost binder. That is, if we have something bound at `to_binder`,
/// it will now be bound at INNERMOST. This is an appropriate thing to do
/// when moving a region out from inside binders:

@ -1388,12 +1388,12 @@ impl DebruijnIndex {
/// // Binder: D3 D2 D1 ^^
/// ```
///
/// Here, the region `'a` would have the debruijn index D3,
/// Here, the region `'a` would have the De Bruijn index D3,
/// because it is the bound 3 binders out. However, if we wanted
/// to refer to that region `'a` in the second argument (the `_`),
/// those two binders would not be in scope. In that case, we
/// might invoke `shift_out_to_binder(D3)`. This would adjust the
/// debruijn index of `'a` to D1 (the innermost binder).
/// De Bruijn index of `'a` to D1 (the innermost binder).
///
/// If we invoke `shift_out_to_binder` and the region is in fact
/// bound by one of the binders we are shifting out of, that is an
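The shifting arithmetic described above is small enough to model directly: an index records how many binders out a variable is bound (0 being the innermost), `shifted_in`/`shifted_out` add or remove binders, and shifting out to a binder makes that binder the innermost one. A simplified sketch follows; indices here start at 0 for the innermost binder, so the doc's D1/D2/D3 correspond to 0/1/2, and this is not the real rustc `DebruijnIndex`.

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct DebruijnIndex(u32);

    const INNERMOST: DebruijnIndex = DebruijnIndex(0);

    impl DebruijnIndex {
        // Moving a value *into* `amount` new binders pushes its index up.
        fn shifted_in(self, amount: u32) -> DebruijnIndex {
            DebruijnIndex(self.0 + amount)
        }

        // Moving a value *out of* `amount` binders pulls its index down.
        fn shifted_out(self, amount: u32) -> DebruijnIndex {
            DebruijnIndex(self.0 - amount)
        }

        // Make `to_binder` the innermost binder for this index.
        fn shifted_out_to_binder(self, to_binder: DebruijnIndex) -> DebruijnIndex {
            self.shifted_out(to_binder.0 - INNERMOST.0)
        }
    }

    fn main() {
        // In the doc's example, `'a` is bound three binders out: D3.
        let a = DebruijnIndex(2); // D3, with D1 == 0
        // Shifting out to binder D3 leaves `'a` at the innermost binder, D1.
        assert_eq!(a.shifted_out_to_binder(DebruijnIndex(2)), INNERMOST);
        // Wrapping the type in one extra binder shifts the index in by one.
        assert_eq!(INNERMOST.shifted_in(1), DebruijnIndex(1));
    }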
@ -1444,7 +1444,7 @@ impl RegionKind {
}
}

/// Adjusts any Debruijn Indices so as to make `to_binder` the
/// Adjusts any De Bruijn indices so as to make `to_binder` the
/// innermost binder. That is, if we have something bound at `to_binder`,
/// it will now be bound at INNERMOST. This is an appropriate thing to do
/// when moving a region out from inside binders:

@ -1454,12 +1454,12 @@ impl RegionKind {
/// // Binder: D3 D2 D1 ^^
/// ```
///
/// Here, the region `'a` would have the debruijn index D3,
/// Here, the region `'a` would have the De Bruijn index D3,
/// because it is the bound 3 binders out. However, if we wanted
/// to refer to that region `'a` in the second argument (the `_`),
/// those two binders would not be in scope. In that case, we
/// might invoke `shift_out_to_binder(D3)`. This would adjust the
/// debruijn index of `'a` to D1 (the innermost binder).
/// De Bruijn index of `'a` to D1 (the innermost binder).
///
/// If we invoke `shift_out_to_binder` and the region is in fact
/// bound by one of the binders we are shifting out of, that is an

@ -1528,7 +1528,7 @@ impl RegionKind {
flags
}

/// Given an early-bound or free region, returns the def-id where it was bound.
/// Given an early-bound or free region, returns the `DefId` where it was bound.
/// For example, consider the regions in this snippet of code:
///
/// ```

@ -1543,10 +1543,10 @@ impl RegionKind {
/// }
/// ```
///
/// Here, `free_region_binding_scope('a)` would return the def-id
/// Here, `free_region_binding_scope('a)` would return the `DefId`
/// of the impl, and for all the other highlighted regions, it
/// would return the def-id of the function. In other cases (not shown), this
/// function might return the def-id of a closure.
/// would return the `DefId` of the function. In other cases (not shown), this
/// function might return the `DefId` of a closure.
pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId {
match self {
ty::ReEarlyBound(br) => {

@ -1772,7 +1772,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> {
}
}

/// Returns true if this type is a floating point type and false otherwise.
/// Returns `true` if this type is a floating point type.
pub fn is_floating_point(&self) -> bool {
match self.sty {
Float(_) |