Merge pull request #12 from rust-lang/master
sync with rust-lang/rust branch master
Commit: e316ba3b60
887 changed files with 13627 additions and 11457 deletions
.mailmap (22 changes)

@@ -117,6 +117,9 @@ Jason Toffaletti <toffaletti@gmail.com> Jason Toffaletti <jason@topsy.com>
Jauhien Piatlicki <jauhien@gentoo.org> Jauhien Piatlicki <jpiatlicki@zertisa.com>
Jay True <glacjay@gmail.com>
Jeremy Letang <letang.jeremy@gmail.com>
+Jeremy Stucki <dev@jeremystucki.ch> <stucki.jeremy@gmail.com>
+Jeremy Stucki <dev@jeremystucki.ch> <jeremy@myelin.ch>
+Jeremy Stucki <dev@jeremystucki.ch>
Jethro Beekman <github@jbeekman.nl>
Jihyun Yu <j.yu@navercorp.com> <yjh0502@gmail.com>
Jihyun Yu <j.yu@navercorp.com> jihyun <jihyun@nablecomm.com>
@@ -181,12 +184,19 @@ Neil Pankey <npankey@gmail.com> <neil@wire.im>
Nick Platt <platt.nicholas@gmail.com>
Nicole Mazzuca <npmazzuca@gmail.com>
Nif Ward <nif.ward@gmail.com>
-Oliver Schneider <oliver.schneider@kit.edu> oli-obk <github6541940@oli-obk.de>
-Oliver Schneider <oliver.schneider@kit.edu> Oliver 'ker' Schneider <rust19446194516@oli-obk.de>
-Oliver Schneider <oliver.schneider@kit.edu> Oliver Schneider <git-spam-no-reply9815368754983@oli-obk.de>
-Oliver Schneider <oliver.schneider@kit.edu> Oliver Schneider <git-spam9815368754983@oli-obk.de>
-Oliver Schneider <oliver.schneider@kit.edu> Oliver Schneider <github333195615777966@oli-obk.de>
-Oliver Schneider <oliver.schneider@kit.edu> Oliver Schneider <github6541940@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <git-spam-no-reply9815368754983@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <git-spam9815368754983@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <github333195615777966@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <github6541940@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <rust19446194516@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <git-no-reply-9879165716479413131@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <git1984941651981@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <github35764891676564198441@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <github6541940@oli-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu> <oli-obk@users.noreply.github.com>
+Oliver Scherer <oliver.schneider@kit.edu> <public.oliver.schneider@kit.edu>
+Oliver Scherer <oliver.schneider@kit.edu> <obk8176014uqher834@olio-obk.de>
+Oliver Scherer <oliver.schneider@kit.edu>
Ožbolt Menegatti <ozbolt.menegatti@gmail.com> gareins <ozbolt.menegatti@gmail.com>
Paul Faria <paul_faria@ultimatesoftware.com> Paul Faria <Nashenas88@gmail.com>
Peer Aramillo Irizar <peer.aramillo.irizar@gmail.com> parir <peer.aramillo.irizar@gmail.com>
Cargo.lock (101 changes)

@@ -185,7 +185,7 @@ dependencies = [
"serde",
"serde_json",
"time",
-"toml 0.4.10",
+"toml",
]

[[package]]
@@ -202,7 +202,7 @@ name = "build-manifest"
version = "0.1.0"
dependencies = [
"serde",
-"toml 0.4.10",
+"toml",
]

[[package]]
@@ -287,7 +287,7 @@ dependencies = [
"git2-curl",
"glob",
"hex",
-"home 0.4.2",
+"home 0.5.0",
"ignore",
"im-rc",
"jobserver",
@@ -316,7 +316,7 @@ dependencies = [
"tar",
"tempfile",
"termcolor",
-"toml 0.5.3",
+"toml",
"unicode-width",
"url 2.1.0",
"walkdir",
@@ -442,7 +442,7 @@ dependencies = [
"semver",
"serde",
"smallvec",
-"toml 0.5.3",
+"toml",
"unicode-normalization",
"url 2.1.0",
]
@@ -946,6 +946,7 @@ name = "error_index_generator"
version = "0.0.0"
dependencies = [
"rustdoc",
"walkdir",
]

[[package]]
@@ -1138,10 +1139,12 @@ dependencies = [

[[package]]
name = "getopts"
-version = "0.2.19"
+version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72327b15c228bfe31f1390f93dd5e9279587f0463836393c9df719ce62a3e450"
+checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
dependencies = [
"rustc-std-workspace-core",
"rustc-std-workspace-std",
"unicode-width",
]

@@ -1157,9 +1160,9 @@ dependencies = [

[[package]]
name = "git2"
-version = "0.9.2"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8cb400360e8a4d61b10e648285bbfa919bbf9519d0d5d5720354456f44349226"
+checksum = "327d698f86a7ebdfeb86a4238ccdb004828939d3a3555b6ead679541d14e36c0"
dependencies = [
"bitflags",
"libc",
@@ -1172,9 +1175,9 @@ dependencies = [

[[package]]
name = "git2-curl"
-version = "0.10.1"
+version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2293de73491c3dc4174c5949ef53d2cc037b27613f88d72032e3f5237247a7dd"
+checksum = "cd6527e480187ce19aaf4fa6acfb7657b25628ce31cb8ffabdfca3bf731524c5"
dependencies = [
"curl",
"git2",
@@ -1279,9 +1282,9 @@ dependencies = [

[[package]]
name = "home"
-version = "0.4.2"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "013e4e6e9134211bb4d6bf53dd8cfb75d9e2715cc33614b9c0827718c6fbe0b8"
+checksum = "c07c315e106bd6f83f026a20ddaeef2706782e490db1dcdd37caad38a0e895b3"
dependencies = [
"scopeguard 1.0.0",
"winapi 0.3.6",
@@ -1579,9 +1582,9 @@ checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"

[[package]]
name = "libc"
-version = "0.2.60"
+version = "0.2.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d44e80633f007889c7eff624b709ab43c92d708caad982295768a7b13ca3b5eb"
+checksum = "c665266eb592905e8503ba3403020f4b8794d26263f412ca33171600eca9a6fa"
dependencies = [
"rustc-std-workspace-core",
]
@@ -1601,9 +1604,9 @@ dependencies = [

[[package]]
name = "libgit2-sys"
-version = "0.8.2"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c179ed6d19cd3a051e68c177fbbc214e79ac4724fac3a850ec9f3d3eb8a5578"
+checksum = "8c2078aec6f4b16d1b89f6a72e4f6eb1e75ffa85312023291e89c6d3087bc8fb"
dependencies = [
"cc",
"libc",
@@ -1665,9 +1668,9 @@ dependencies = [

[[package]]
name = "log"
-version = "0.4.6"
+version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
+checksum = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
dependencies = [
"cfg-if",
]
@@ -1785,7 +1788,7 @@ dependencies = [
"serde_json",
"shlex",
"tempfile",
-"toml 0.5.3",
+"toml",
"toml-query",
]

@@ -1989,6 +1992,7 @@ dependencies = [
"compiletest_rs",
"directories",
"env_logger 0.6.0",
"getrandom",
"hex",
"log",
"num-traits",
@@ -2374,6 +2378,9 @@ dependencies = [
[[package]]
name = "proc_macro"
version = "0.0.0"
dependencies = [
"std",
]

[[package]]
name = "profiler_builtins"
@@ -2760,7 +2767,7 @@ dependencies = [
"tokio",
"tokio-process",
"tokio-timer",
-"toml 0.5.3",
+"toml",
"url 1.7.2",
"walkdir",
]
@@ -3062,6 +3069,13 @@ dependencies = [
"core",
]

[[package]]
name = "rustc-std-workspace-std"
version = "1.0.0"
dependencies = [
"std",
]

[[package]]
name = "rustc-workspace-hack"
version = "1.0.0"
@@ -3218,6 +3232,7 @@ dependencies = [
"rustc_data_structures",
"serialize",
"syntax_pos",
"term_size",
"termcolor",
"unicode-width",
]
@@ -3564,7 +3579,7 @@ dependencies = [

[[package]]
name = "rustfmt-nightly"
-version = "1.4.4"
+version = "1.4.6"
dependencies = [
"annotate-snippets",
"atty",
@@ -3590,7 +3605,7 @@ dependencies = [
"serde_json",
"structopt",
"term 0.6.0",
-"toml 0.5.3",
+"toml",
"unicode-segmentation",
"unicode-width",
"unicode_categories",
@@ -4067,6 +4082,10 @@ dependencies = [
[[package]]
name = "term"
version = "0.0.0"
dependencies = [
"core",
"std",
]

[[package]]
name = "term"
@@ -4089,6 +4108,17 @@ dependencies = [
"winapi 0.3.6",
]

[[package]]
name = "term_size"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5b9a66db815dcfd2da92db471106457082577c3c278d4138ab3e3b4e189327"
dependencies = [
"kernel32-sys",
"libc",
"winapi 0.2.8",
]

[[package]]
name = "termcolor"
version = "1.0.4"
@@ -4113,8 +4143,13 @@ dependencies = [
name = "test"
version = "0.0.0"
dependencies = [
"core",
"getopts",
"libc",
"panic_abort",
"panic_unwind",
"proc_macro",
"std",
"term 0.0.0",
]

@@ -4382,15 +4417,6 @@ dependencies = [
"tokio-reactor",
]

[[package]]
name = "toml"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f"
dependencies = [
"serde",
]

[[package]]
name = "toml"
version = "0.5.3"
@@ -4411,7 +4437,7 @@ dependencies = [
"is-match",
"lazy_static 1.3.0",
"regex",
-"toml 0.5.3",
+"toml",
"toml-query_derive",
]

@@ -4491,9 +4517,14 @@ checksum = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"

[[package]]
name = "unicode-width"
-version = "0.1.5"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
+checksum = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-core",
"rustc-std-workspace-std",
]

[[package]]
name = "unicode-xid"
--- next file ---
@@ -68,6 +68,7 @@ rustc-workspace-hack = { path = 'src/tools/rustc-workspace-hack' }
# here
rustc-std-workspace-core = { path = 'src/tools/rustc-std-workspace-core' }
rustc-std-workspace-alloc = { path = 'src/tools/rustc-std-workspace-alloc' }
rustc-std-workspace-std = { path = 'src/tools/rustc-std-workspace-std' }

[patch."https://github.com/rust-lang/rust-clippy"]
clippy_lints = { path = "src/tools/clippy/clippy_lints" }
--- next file ---
@@ -382,11 +382,6 @@
# This is the name of the directory in which codegen backends will get installed
#codegen-backends-dir = "codegen-backends"

# Flag indicating whether `libstd` calls an imported function to handle basic IO
# when targeting WebAssembly. Enable this to debug tests for the `wasm32-unknown-unknown`
# target, as without this option the test output will not be captured.
#wasm-syscall = false

# Indicates whether LLD will be compiled and made available in the sysroot for
# rustc to execute.
#lld = false
--- next file ---
@@ -44,7 +44,7 @@ cc = "1.0.35"
libc = "0.2"
serde = { version = "1.0.8", features = ["derive"] }
serde_json = "1.0.2"
-toml = "0.4"
+toml = "0.5"
lazy_static = "1.3.0"
time = "0.1"
petgraph = "0.4.13"
--- next file ---
@@ -5,9 +5,6 @@
//! parent directory, and otherwise documentation can be found throughout the `build`
//! directory in each respective module.

// NO-RUSTC-WRAPPER
#![deny(warnings, rust_2018_idioms, unused_lifetimes)]

use std::env;

use bootstrap::{Config, Build};
--- next file ---
@@ -15,9 +15,6 @@
//! switching compilers for the bootstrap and for build scripts will probably
//! never get replaced.

// NO-RUSTC-WRAPPER
#![deny(warnings, rust_2018_idioms, unused_lifetimes)]

use std::env;
use std::ffi::OsString;
use std::io;
@@ -124,8 +121,9 @@ fn main() {

if env::var_os("RUSTC_DENY_WARNINGS").is_some() &&
env::var_os("RUSTC_EXTERNAL_TOOL").is_none() {
-// When extending this list, search for `NO-RUSTC-WRAPPER` and add the new lints
-// there as well, some code doesn't go through this `rustc` wrapper.
+// When extending this list, add the new lints to the RUSTFLAGS of the
+// build_bootstrap function of src/bootstrap/bootstrap.py as well as
+// some code doesn't go through this `rustc` wrapper.
cmd.arg("-Dwarnings");
cmd.arg("-Drust_2018_idioms");
cmd.arg("-Dunused_lifetimes");
--- next file ---
@@ -2,12 +2,10 @@
//!
//! See comments in `src/bootstrap/rustc.rs` for more information.

// NO-RUSTC-WRAPPER
#![deny(warnings, rust_2018_idioms, unused_lifetimes)]

use std::env;
use std::process::Command;
use std::path::PathBuf;
use std::ffi::OsString;

fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
@@ -47,7 +45,9 @@ fn main() {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
if let Some(linker) = env::var_os("RUSTC_TARGET_LINKER") {
-cmd.arg("--linker").arg(linker).arg("-Z").arg("unstable-options");
+let mut arg = OsString::from("-Clinker=");
+arg.push(&linker);
+cmd.arg(arg);
}

// Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick
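The hunk above swaps rustdoc's unstable `--linker` flag for an ordinary codegen flag assembled as an `OsString`. A minimal standalone sketch of that pattern is below; the wrapper name and the printed output are illustrative only, the `RUSTC_TARGET_LINKER` variable comes from the diff itself.

```rust
use std::env;
use std::ffi::OsString;
use std::process::Command;

fn main() {
    let mut cmd = Command::new("rustdoc");
    // If a target linker was requested via the environment, pass it through
    // as `-Clinker=<path>` instead of the unstable `--linker` option.
    if let Some(linker) = env::var_os("RUSTC_TARGET_LINKER") {
        let mut arg = OsString::from("-Clinker=");
        arg.push(&linker);
        cmd.arg(arg);
    }
    // Print the command that would be run instead of executing it.
    println!("{:?}", cmd);
}
```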
--- next file ---
@@ -320,7 +320,7 @@ class RustBuild(object):
def __init__(self):
self.cargo_channel = ''
self.date = ''
-self._download_url = 'https://static.rust-lang.org'
+self._download_url = ''
self.rustc_channel = ''
self.build = ''
self.build_dir = os.path.join(os.getcwd(), "build")
@@ -631,6 +631,8 @@ class RustBuild(object):
target_linker = self.get_toml("linker", build_section)
if target_linker is not None:
env["RUSTFLAGS"] += "-C linker=" + target_linker + " "
if self.get_toml("deny-warnings", "rust") != "false":
env["RUSTFLAGS"] += "-Dwarnings -Drust_2018_idioms -Dunused_lifetimes "

env["PATH"] = os.path.join(self.bin_root(), "bin") + \
os.pathsep + env["PATH"]
@@ -731,9 +733,19 @@ class RustBuild(object):
self.update_submodule(module[0], module[1], recorded_submodules)
print("Submodules updated in %.2f seconds" % (time() - start_time))

def set_normal_environment(self):
"""Set download URL for normal environment"""
if 'RUSTUP_DIST_SERVER' in os.environ:
self._download_url = os.environ['RUSTUP_DIST_SERVER']
else:
self._download_url = 'https://static.rust-lang.org'

def set_dev_environment(self):
"""Set download URL for development environment"""
-self._download_url = 'https://dev-static.rust-lang.org'
+if 'RUSTUP_DEV_DIST_SERVER' in os.environ:
+self._download_url = os.environ['RUSTUP_DEV_DIST_SERVER']
+else:
+self._download_url = 'https://dev-static.rust-lang.org'

def check_vendored_status(self):
"""Check that vendoring is configured properly"""
@@ -826,6 +838,8 @@ def bootstrap(help_triggered):

if 'dev' in data:
build.set_dev_environment()
else:
build.set_normal_environment()

build.update_submodules()

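The bootstrap hunks above make the download server overridable through `RUSTUP_DIST_SERVER` and `RUSTUP_DEV_DIST_SERVER`, falling back to the static.rust-lang.org defaults. A rough Rust sketch of the same fallback logic; the function name is made up for illustration, the variable names and URLs come from the diff.

```rust
use std::env;

/// Pick the download server, preferring an environment override and
/// otherwise using the default the bootstrap script falls back to.
fn download_url(dev: bool) -> String {
    if dev {
        env::var("RUSTUP_DEV_DIST_SERVER")
            .unwrap_or_else(|_| "https://dev-static.rust-lang.org".to_string())
    } else {
        env::var("RUSTUP_DIST_SERVER")
            .unwrap_or_else(|_| "https://static.rust-lang.org".to_string())
    }
}

fn main() {
    println!("normal: {}", download_url(false));
    println!("dev:    {}", download_url(true));
}
```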
--- next file ---
@@ -337,7 +337,6 @@ impl<'a> Builder<'a> {
match kind {
Kind::Build => describe!(
compile::Std,
compile::Test,
compile::Rustc,
compile::CodegenBackend,
compile::StartupObjects,
@@ -363,7 +362,6 @@ impl<'a> Builder<'a> {
),
Kind::Check | Kind::Clippy | Kind::Fix => describe!(
check::Std,
check::Test,
check::Rustc,
check::CodegenBackend,
check::Rustdoc
@@ -425,8 +423,6 @@ impl<'a> Builder<'a> {
doc::TheBook,
doc::Standalone,
doc::Std,
doc::Test,
doc::WhitelistedRustc,
doc::Rustc,
doc::Rustdoc,
doc::ErrorIndex,
@@ -801,7 +797,7 @@ impl<'a> Builder<'a> {
}

match mode {
-Mode::Std | Mode::Test | Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolTest=> {},
+Mode::Std | Mode::ToolBootstrap | Mode::ToolStd => {},
Mode::Rustc | Mode::Codegen | Mode::ToolRustc => {
// Build proc macros both for the host and the target
if target != compiler.host && cmd != "check" {
@@ -852,7 +848,6 @@ impl<'a> Builder<'a> {
// things still build right, please do!
match mode {
Mode::Std => metadata.push_str("std"),
Mode::Test => metadata.push_str("test"),
_ => {},
}
cargo.env("__CARGO_DEFAULT_LIB_METADATA", &metadata);
@@ -875,8 +870,7 @@ impl<'a> Builder<'a> {
}

if cmd == "clippy" {
-extra_args.push_str("-Zforce-unstable-if-unmarked -Zunstable-options \
---json-rendered=termcolor");
+extra_args.push_str("-Zforce-unstable-if-unmarked");
}

if !extra_args.is_empty() {
@@ -949,9 +943,9 @@ impl<'a> Builder<'a> {

let debuginfo_level = match mode {
Mode::Rustc | Mode::Codegen => self.config.rust_debuginfo_level_rustc,
-Mode::Std | Mode::Test => self.config.rust_debuginfo_level_std,
+Mode::Std => self.config.rust_debuginfo_level_std,
Mode::ToolBootstrap | Mode::ToolStd |
-Mode::ToolTest | Mode::ToolRustc => self.config.rust_debuginfo_level_tools,
+Mode::ToolRustc => self.config.rust_debuginfo_level_tools,
};
cargo.env("RUSTC_DEBUGINFO_LEVEL", debuginfo_level.to_string());

@@ -1151,7 +1145,6 @@ impl<'a> Builder<'a> {

match (mode, self.config.rust_codegen_units_std, self.config.rust_codegen_units) {
(Mode::Std, Some(n), _) |
(Mode::Test, Some(n), _) |
(_, _, Some(n)) => {
cargo.env("RUSTC_CODEGEN_UNITS", n.to_string());
}
--- next file ---
@@ -365,27 +365,6 @@ fn dist_with_same_targets_and_hosts() {
},
]
);
assert_eq!(
first(builder.cache.all::<compile::Test>()),
&[
compile::Test {
compiler: Compiler { host: a, stage: 0 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
]
);
assert_eq!(
first(builder.cache.all::<compile::Assemble>()),
&[
@@ -415,7 +394,47 @@ fn build_default() {
let b = INTERNER.intern_str("B");
let c = INTERNER.intern_str("C");

assert!(!builder.cache.all::<compile::Std>().is_empty());
assert_eq!(
first(builder.cache.all::<compile::Std>()),
&[
compile::Std {
compiler: Compiler { host: a, stage: 0 },
target: a,
},
compile::Std {
compiler: Compiler { host: a, stage: 1 },
target: a,
},
compile::Std {
compiler: Compiler { host: a, stage: 2 },
target: a,
},
compile::Std {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Std {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
compile::Std {
compiler: Compiler { host: a, stage: 2 },
target: b,
},
compile::Std {
compiler: Compiler { host: b, stage: 2 },
target: b,
},
compile::Std {
compiler: Compiler { host: a, stage: 2 },
target: c,
},
compile::Std {
compiler: Compiler { host: b, stage: 2 },
target: c,
},
]
);
assert!(!builder.cache.all::<compile::Assemble>().is_empty());
assert_eq!(
first(builder.cache.all::<compile::Rustc>()),
@@ -450,48 +469,6 @@ fn build_default() {
},
]
);

assert_eq!(
first(builder.cache.all::<compile::Test>()),
&[
compile::Test {
compiler: Compiler { host: a, stage: 0 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: b,
},
compile::Test {
compiler: Compiler { host: b, stage: 2 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: c,
},
compile::Test {
compiler: Compiler { host: b, stage: 2 },
target: c,
},
]
);
}

#[test]
@@ -506,7 +483,47 @@ fn build_with_target_flag() {
let b = INTERNER.intern_str("B");
let c = INTERNER.intern_str("C");

assert!(!builder.cache.all::<compile::Std>().is_empty());
assert_eq!(
first(builder.cache.all::<compile::Std>()),
&[
compile::Std {
compiler: Compiler { host: a, stage: 0 },
target: a,
},
compile::Std {
compiler: Compiler { host: a, stage: 1 },
target: a,
},
compile::Std {
compiler: Compiler { host: a, stage: 2 },
target: a,
},
compile::Std {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Std {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
compile::Std {
compiler: Compiler { host: a, stage: 2 },
target: b,
},
compile::Std {
compiler: Compiler { host: b, stage: 2 },
target: b,
},
compile::Std {
compiler: Compiler { host: a, stage: 2 },
target: c,
},
compile::Std {
compiler: Compiler { host: b, stage: 2 },
target: c,
},
]
);
assert_eq!(
first(builder.cache.all::<compile::Assemble>()),
&[
@@ -541,48 +558,6 @@ fn build_with_target_flag() {
},
]
);

assert_eq!(
first(builder.cache.all::<compile::Test>()),
&[
compile::Test {
compiler: Compiler { host: a, stage: 0 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: b,
},
compile::Test {
compiler: Compiler { host: b, stage: 2 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: c,
},
compile::Test {
compiler: Compiler { host: b, stage: 2 },
target: c,
},
]
);
}

#[test]
--- next file ---
@@ -1,6 +1,6 @@
//! Implementation of compiling the compiler and standard library, in "check"-based modes.

-use crate::compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, rustc_cargo_env,
+use crate::compile::{run_cargo, std_cargo, rustc_cargo, rustc_cargo_env,
add_to_sysroot};
use crate::builder::{RunConfig, Builder, Kind, ShouldRun, Step};
use crate::tool::{prepare_tool_cargo, SourceType};
@@ -34,7 +34,7 @@ impl Step for Std {
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-run.all_krates("std")
+run.all_krates("test")
}

fn make_run(run: RunConfig<'_>) {
@@ -92,7 +92,7 @@ impl Step for Rustc {
let compiler = builder.compiler(0, builder.config.build);
let target = self.target;

-builder.ensure(Test { target });
+builder.ensure(Std { target });

let mut cargo = builder.cargo(compiler, Mode::Rustc, target,
cargo_subcommand(builder.kind));
@@ -159,47 +159,6 @@ impl Step for CodegenBackend {
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Test {
pub target: Interned<String>,
}

impl Step for Test {
type Output = ();
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.all_krates("test")
}

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Test {
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) {
let compiler = builder.compiler(0, builder.config.build);
let target = self.target;

builder.ensure(Std { target });

let mut cargo = builder.cargo(compiler, Mode::Test, target, cargo_subcommand(builder.kind));
test_cargo(builder, &compiler, target, &mut cargo);

builder.info(&format!("Checking test artifacts ({} -> {})", &compiler.host, target));
run_cargo(builder,
&mut cargo,
args(builder.kind),
&libtest_stamp(builder, compiler, target),
true);

let libdir = builder.sysroot_libdir(compiler, target);
let hostdir = builder.sysroot_libdir(compiler, compiler.host);
add_to_sysroot(builder, &libdir, &hostdir, &libtest_stamp(builder, compiler, target));
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustdoc {
pub target: Interned<String>,
@@ -258,16 +217,6 @@ pub fn libstd_stamp(
builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp")
}

/// Cargo's output path for libtest in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn libtest_stamp(
builder: &Builder<'_>,
compiler: Compiler,
target: Interned<String>,
) -> PathBuf {
builder.cargo_out(compiler, Mode::Test, target).join(".libtest-check.stamp")
}

/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn librustc_stamp(
--- next file ---
@@ -39,7 +39,7 @@ impl Step for Std {
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-run.all_krates("std")
+run.all_krates("test")
}

fn make_run(run: RunConfig<'_>) {
@@ -216,7 +216,7 @@ pub fn std_cargo(builder: &Builder<'_>,

cargo.arg("--features").arg(features)
.arg("--manifest-path")
-.arg(builder.src.join("src/libstd/Cargo.toml"));
+.arg(builder.src.join("src/libtest/Cargo.toml"));

if target.contains("musl") {
if let Some(p) = builder.musl_root(target) {
@@ -358,129 +358,6 @@ impl Step for StartupObjects {
}
}

#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Test {
pub target: Interned<String>,
pub compiler: Compiler,
}

impl Step for Test {
type Output = ();
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.all_krates("test")
}

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Test {
compiler: run.builder.compiler(run.builder.top_stage, run.host),
target: run.target,
});
}

/// Builds libtest.
///
/// This will build libtest and supporting libraries for a particular stage of
/// the build using the `compiler` targeting the `target` architecture. The
/// artifacts created will also be linked into the sysroot directory.
fn run(self, builder: &Builder<'_>) {
let target = self.target;
let compiler = self.compiler;

builder.ensure(Std { compiler, target });

if builder.config.keep_stage.contains(&compiler.stage) {
builder.info("Warning: Using a potentially old libtest. This may not behave well.");
builder.ensure(TestLink {
compiler,
target_compiler: compiler,
target,
});
return;
}

let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
if compiler_to_use != compiler {
builder.ensure(Test {
compiler: compiler_to_use,
target,
});
builder.info(
&format!("Uplifting stage1 test ({} -> {})", builder.config.build, target));
builder.ensure(TestLink {
compiler: compiler_to_use,
target_compiler: compiler,
target,
});
return;
}

let mut cargo = builder.cargo(compiler, Mode::Test, target, "build");
test_cargo(builder, &compiler, target, &mut cargo);

builder.info(&format!("Building stage{} test artifacts ({} -> {})", compiler.stage,
&compiler.host, target));
run_cargo(builder,
&mut cargo,
vec![],
&libtest_stamp(builder, compiler, target),
false);

builder.ensure(TestLink {
compiler: builder.compiler(compiler.stage, builder.config.build),
target_compiler: compiler,
target,
});
}
}

/// Same as `std_cargo`, but for libtest
pub fn test_cargo(builder: &Builder<'_>,
_compiler: &Compiler,
_target: Interned<String>,
cargo: &mut Command) {
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
}
cargo.arg("--manifest-path")
.arg(builder.src.join("src/libtest/Cargo.toml"));
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TestLink {
pub compiler: Compiler,
pub target_compiler: Compiler,
pub target: Interned<String>,
}

impl Step for TestLink {
type Output = ();

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.never()
}

/// Same as `std_link`, only for libtest
fn run(self, builder: &Builder<'_>) {
let compiler = self.compiler;
let target_compiler = self.target_compiler;
let target = self.target;
builder.info(&format!("Copying stage{} test from stage{} ({} -> {} / {})",
target_compiler.stage,
compiler.stage,
&compiler.host,
target_compiler.host,
target));
add_to_sysroot(
builder,
&builder.sysroot_libdir(target_compiler, target),
&builder.sysroot_libdir(target_compiler, compiler.host),
&libtest_stamp(builder, compiler, target)
);
}
}

#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
pub target: Interned<String>,
@@ -512,7 +389,7 @@ impl Step for Rustc {
let compiler = self.compiler;
let target = self.target;

-builder.ensure(Test { compiler, target });
+builder.ensure(Std { compiler, target });

if builder.config.keep_stage.contains(&compiler.stage) {
builder.info("Warning: Using a potentially old librustc. This may not behave well.");
@@ -541,7 +418,7 @@ impl Step for Rustc {
}

// Ensure that build scripts and proc macros have a std / libproc_macro to link against.
-builder.ensure(Test {
+builder.ensure(Std {
compiler: builder.compiler(self.compiler.stage, builder.config.build),
target: builder.config.build,
});
@@ -872,16 +749,6 @@ pub fn libstd_stamp(
builder.cargo_out(compiler, Mode::Std, target).join(".libstd.stamp")
}

/// Cargo's output path for libtest in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn libtest_stamp(
builder: &Builder<'_>,
compiler: Compiler,
target: Interned<String>,
) -> PathBuf {
builder.cargo_out(compiler, Mode::Test, target).join(".libtest.stamp")
}

/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn librustc_stamp(
--- next file ---
@@ -122,7 +122,6 @@ pub struct Config {

// libstd features
pub backtrace: bool, // support for RUST_BACKTRACE
pub wasm_syscall: bool,

// misc
pub low_priority: bool,
@@ -318,7 +317,6 @@ struct Rust {
save_toolstates: Option<String>,
codegen_backends: Option<Vec<String>>,
codegen_backends_dir: Option<String>,
wasm_syscall: Option<bool>,
lld: Option<bool>,
lldb: Option<bool>,
llvm_tools: Option<bool>,
@@ -558,7 +556,6 @@ impl Config {
if let Some(true) = rust.incremental {
config.incremental = true;
}
set(&mut config.wasm_syscall, rust.wasm_syscall);
set(&mut config.lld_enabled, rust.lld);
set(&mut config.lldb_enabled, rust.lldb);
set(&mut config.llvm_tools_enabled, rust.llvm_tools);
--- next file ---
@@ -678,12 +678,7 @@ impl Step for Std {
if builder.hosts.iter().any(|t| t == target) {
builder.ensure(compile::Rustc { compiler, target });
} else {
if builder.no_std(target) == Some(true) {
// the `test` doesn't compile for no-std targets
builder.ensure(compile::Std { compiler, target });
} else {
builder.ensure(compile::Test { compiler, target });
}
builder.ensure(compile::Std { compiler, target });
}

let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
@@ -912,6 +907,7 @@ impl Step for Src {
"src/libproc_macro",
"src/tools/rustc-std-workspace-core",
"src/tools/rustc-std-workspace-alloc",
"src/tools/rustc-std-workspace-std",
"src/librustc",
"src/libsyntax",
];
--- next file ---
@@ -375,7 +375,7 @@ impl Step for Standalone {
up_to_date(&footer, &html) &&
up_to_date(&favicon, &html) &&
up_to_date(&full_toc, &html) &&
-up_to_date(&version_info, &html) &&
+(builder.config.dry_run || up_to_date(&version_info, &html)) &&
(builder.config.dry_run || up_to_date(&rustdoc, &html)) {
continue
}
@@ -413,7 +413,7 @@ impl Step for Std {

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
-run.all_krates("std").default_condition(builder.config.docs)
+run.all_krates("test").default_condition(builder.config.docs)
}

fn make_run(run: RunConfig<'_>) {
@@ -478,138 +478,12 @@ impl Step for Std {
builder.run(&mut cargo);
builder.cp_r(&my_out, &out);
};
-for krate in &["alloc", "core", "std"] {
+for krate in &["alloc", "core", "std", "proc_macro", "test"] {
run_cargo_rustdoc_for(krate);
}
}
}

#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Test {
stage: u32,
target: Interned<String>,
}

impl Step for Test {
type Output = ();
const DEFAULT: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
run.krate("test").default_condition(builder.config.docs)
}

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Test {
stage: run.builder.top_stage,
target: run.target,
});
}

/// Compile all libtest documentation.
///
/// This will generate all documentation for libtest and its dependencies. This
/// is largely just a wrapper around `cargo doc`.
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
builder.info(&format!("Documenting stage{} test ({})", stage, target));
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let compiler = builder.compiler_for(stage, builder.config.build, target);

// Build libstd docs so that we generate relative links
builder.ensure(Std { stage, target });

builder.ensure(compile::Test { compiler, target });
let out_dir = builder.stage_out(compiler, Mode::Test)
.join(target).join("doc");

// See docs in std above for why we symlink
let my_out = builder.crate_doc_out(target);
t!(symlink_dir_force(&builder.config, &my_out, &out_dir));

let mut cargo = builder.cargo(compiler, Mode::Test, target, "doc");
compile::test_cargo(builder, &compiler, target, &mut cargo);

cargo.arg("--no-deps")
.arg("-p").arg("test")
.env("RUSTDOC_RESOURCE_SUFFIX", crate::channel::CFG_RELEASE_NUM)
.env("RUSTDOC_GENERATE_REDIRECT_PAGES", "1");

builder.run(&mut cargo);
builder.cp_r(&my_out, &out);
}
}

#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct WhitelistedRustc {
stage: u32,
target: Interned<String>,
}

impl Step for WhitelistedRustc {
type Output = ();
const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
run.krate("rustc-main").default_condition(builder.config.docs)
}

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(WhitelistedRustc {
stage: run.builder.top_stage,
target: run.target,
});
}

/// Generates whitelisted compiler crate documentation.
///
/// This will generate all documentation for crates that are whitelisted
/// to be included in the standard documentation. This documentation is
/// included in the standard Rust documentation, so we should always
/// document it and symlink to merge with the rest of the std and test
/// documentation. We don't build other compiler documentation
/// here as we want to be able to keep it separate from the standard
/// documentation. This is largely just a wrapper around `cargo doc`.
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
builder.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target));
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let compiler = builder.compiler_for(stage, builder.config.build, target);

// Build libstd docs so that we generate relative links
builder.ensure(Std { stage, target });

builder.ensure(compile::Rustc { compiler, target });
let out_dir = builder.stage_out(compiler, Mode::Rustc)
.join(target).join("doc");

// See docs in std above for why we symlink
let my_out = builder.crate_doc_out(target);
t!(symlink_dir_force(&builder.config, &my_out, &out_dir));

let mut cargo = builder.cargo(compiler, Mode::Rustc, target, "doc");
compile::rustc_cargo(builder, &mut cargo);

// We don't want to build docs for internal compiler dependencies in this
// step (there is another step for that). Therefore, we whitelist the crates
// for which docs must be built.
for krate in &["proc_macro"] {
cargo.arg("-p").arg(krate)
.env("RUSTDOC_RESOURCE_SUFFIX", crate::channel::CFG_RELEASE_NUM)
.env("RUSTDOC_GENERATE_REDIRECT_PAGES", "1");
}

builder.run(&mut cargo);
builder.cp_r(&my_out, &out);
}
}

#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
stage: u32,
@@ -825,8 +699,7 @@ impl Step for ErrorIndex {
index.arg(crate::channel::CFG_RELEASE_NUM);

// FIXME: shouldn't have to pass this env var
-index.env("CFG_BUILD", &builder.config.build)
-.env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());
+index.env("CFG_BUILD", &builder.config.build);

builder.run(&mut index);
}
--- next file ---
@@ -103,9 +103,6 @@
//! More documentation can be found in each respective module below, and you can
//! also check out the `src/bootstrap/README.md` file for more information.

// NO-RUSTC-WRAPPER
#![deny(warnings, rust_2018_idioms, unused_lifetimes)]

#![feature(core_intrinsics)]
#![feature(drain_filter)]

@@ -297,9 +294,6 @@ pub enum Mode {
/// Build the standard library, placing output in the "stageN-std" directory.
Std,

/// Build libtest, placing output in the "stageN-test" directory.
Test,

/// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory.
Rustc,

@@ -315,7 +309,6 @@ pub enum Mode {
/// Compile a tool which uses all libraries we compile (up to rustc).
/// Doesn't use the stage0 compiler libraries like "other", and includes
/// tools like rustdoc, cargo, rls, etc.
ToolTest,
ToolStd,
ToolRustc,
}
@@ -502,9 +495,6 @@ impl Build {
if self.config.profiler {
features.push_str(" profiler");
}
if self.config.wasm_syscall {
features.push_str(" wasm_syscall");
}
features
}

@@ -536,11 +526,10 @@ impl Build {
fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf {
let suffix = match mode {
Mode::Std => "-std",
Mode::Test => "-test",
Mode::Rustc => "-rustc",
Mode::Codegen => "-codegen",
Mode::ToolBootstrap => "-bootstrap-tools",
-Mode::ToolStd | Mode::ToolTest | Mode::ToolRustc => "-tools",
+Mode::ToolStd | Mode::ToolRustc => "-tools",
};
self.out.join(&*compiler.host)
.join(format!("stage{}{}", compiler.stage, suffix))
--- next file ---
@@ -1040,21 +1040,10 @@ impl Step for Compiletest {
builder.ensure(compile::Rustc { compiler, target });
}

if builder.no_std(target) == Some(true) {
// the `test` doesn't compile for no-std targets
builder.ensure(compile::Std { compiler, target });
} else {
builder.ensure(compile::Test { compiler, target });
}
builder.ensure(compile::Std { compiler, target });
// ensure that `libproc_macro` is available on the host.
builder.ensure(compile::Std { compiler, target: compiler.host });

if builder.no_std(target) == Some(true) {
// for no_std run-make (e.g., thumb*),
// we need a host compiler which is called by cargo.
builder.ensure(compile::Std { compiler, target: compiler.host });
}

// HACK(eddyb) ensure that `libproc_macro` is available on the host.
builder.ensure(compile::Test { compiler, target: compiler.host });
// Also provide `rust_test_helpers` for the host.
builder.ensure(native::TestHelpers { target: compiler.host });

@@ -1399,7 +1388,7 @@ impl Step for DocTest {
fn run(self, builder: &Builder<'_>) {
let compiler = self.compiler;

-builder.ensure(compile::Test {
+builder.ensure(compile::Std {
compiler,
target: compiler.host,
});
@@ -1535,8 +1524,7 @@ impl Step for ErrorIndex {
);
tool.arg("markdown")
.arg(&output)
-.env("CFG_BUILD", &builder.config.build)
-.env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());
+.env("CFG_BUILD", &builder.config.build);

builder.info(&format!("Testing error-index stage{}", compiler.stage));
let _time = util::timeit(&builder);
@@ -1710,8 +1698,7 @@ impl Step for Crate {

fn should_run(mut run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
run = run.krate("test");
-for krate in run.builder.in_tree_crates("std") {
+for krate in run.builder.in_tree_crates("test") {
if !(krate.name.starts_with("rustc_") && krate.name.ends_with("san")) {
run = run.path(krate.local_path(&builder).to_str().unwrap());
}
@@ -1735,14 +1722,9 @@ impl Step for Crate {
});
};

for krate in builder.in_tree_crates("std") {
if run.path.ends_with(&krate.local_path(&builder)) {
make(Mode::Std, krate);
}
}
for krate in builder.in_tree_crates("test") {
if run.path.ends_with(&krate.local_path(&builder)) {
-make(Mode::Test, krate);
+make(Mode::Std, krate);
}
}
}
@@ -1762,7 +1744,7 @@ impl Step for Crate {
let test_kind = self.test_kind;
let krate = self.krate;

-builder.ensure(compile::Test { compiler, target });
+builder.ensure(compile::Std { compiler, target });
builder.ensure(RemoteCopyLibs { compiler, target });

// If we're not doing a full bootstrap but we're testing a stage2
@@ -1776,9 +1758,6 @@ impl Step for Crate {
Mode::Std => {
compile::std_cargo(builder, &compiler, target, &mut cargo);
}
Mode::Test => {
compile::test_cargo(builder, &compiler, target, &mut cargo);
}
Mode::Rustc => {
builder.ensure(compile::Rustc { compiler, target });
compile::rustc_cargo(builder, &mut cargo);
@@ -1832,16 +1811,6 @@ impl Step for Crate {
.expect("nodejs not configured"),
);
} else if target.starts_with("wasm32") {
// Warn about running tests without the `wasm_syscall` feature enabled.
// The javascript shim implements the syscall interface so that test
// output can be correctly reported.
if !builder.config.wasm_syscall {
builder.info(
"Libstd was built without `wasm_syscall` feature enabled: \
test output may not be visible."
);
}

// On the wasm32-unknown-unknown target we're using LTO which is
// incompatible with `-C prefer-dynamic`, so disable that here
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
@@ -1980,7 +1949,7 @@ impl Step for RemoteCopyLibs {
return;
}

-builder.ensure(compile::Test { compiler, target });
+builder.ensure(compile::Std { compiler, target });

builder.info(&format!("REMOTE copy libs to emulator ({})", target));
t!(fs::create_dir_all(builder.out.join("tmp")));
--- next file ---
@@ -577,12 +577,6 @@ impl Step for Cargo {
}

fn run(self, builder: &Builder<'_>) -> PathBuf {
// Cargo depends on procedural macros, so make sure the host
// libstd/libproc_macro is available.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
@@ -650,31 +644,10 @@ macro_rules! tool_extended {

tool_extended!((self, builder),
Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", {};
CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", {
// Clippy depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
};
Clippy, clippy, "src/tools/clippy", "clippy-driver", {
// Clippy depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
};
CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", {};
Clippy, clippy, "src/tools/clippy", "clippy-driver", {};
Miri, miri, "src/tools/miri", "miri", {};
CargoMiri, miri, "src/tools/miri", "cargo-miri", {
// Miri depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
};
CargoMiri, miri, "src/tools/miri", "cargo-miri", {};
Rls, rls, "src/tools/rls", "rls", {
let clippy = builder.ensure(Clippy {
compiler: self.compiler,
@@ -684,12 +657,6 @@ tool_extended!((self, builder),
if clippy.is_some() {
self.extra_features.push("clippy".to_owned());
}
// RLS depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
};
Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", {};
);
--- next file ---
@@ -1,6 +1,3 @@
// NO-RUSTC-WRAPPER
#![deny(warnings, rust_2018_idioms, unused_lifetimes)]

use std::fs::File;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
@@ -262,7 +259,7 @@ pub fn native_lib_boilerplate(
if !up_to_date(Path::new("build.rs"), &timestamp) || !up_to_date(src_dir, &timestamp) {
Ok(NativeLibBoilerplate {
src_dir: src_dir.to_path_buf(),
-out_dir: out_dir,
+out_dir,
})
} else {
Err(())
--- next file ---
@@ -104,9 +104,7 @@ ENV TARGETS=$TARGETS,armv5te-unknown-linux-musleabi
ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,aarch64-unknown-linux-musl
ENV TARGETS=$TARGETS,sparc64-unknown-linux-gnu
# FIXME: temporarily disable the redox builder,
# see: https://github.com/rust-lang/rust/issues/63160
# ENV TARGETS=$TARGETS,x86_64-unknown-redox
ENV TARGETS=$TARGETS,x86_64-unknown-redox
ENV TARGETS=$TARGETS,thumbv6m-none-eabi
ENV TARGETS=$TARGETS,thumbv7m-none-eabi
ENV TARGETS=$TARGETS,thumbv7em-none-eabi
@@ -132,7 +130,7 @@ ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \
CC_thumbv7neon_unknown_linux_gnueabihf=arm-linux-gnueabihf-gcc \
AR_thumbv7neon_unknown_linux_gnueabihf=arm-linux-gnueabihf-ar \
CXX_thumbv7neon_unknown_linux_gnueabihf=arm-linux-gnueabihf-g++

ENV RUST_CONFIGURE_ARGS \
--musl-root-armv5te=/musl-armv5te \
--musl-root-arm=/musl-arm \
--- next file ---
@@ -5,7 +5,7 @@ mkdir /usr/local/mipsel-linux-musl
# Note that this originally came from:
# https://downloads.openwrt.org/snapshots/trunk/malta/generic/
# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
-URL="https://rust-lang-ci2.s3.amazonaws.com/libc"
+URL="https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc"
FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2"
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
--- next file ---
@@ -1 +1 @@
-Subproject commit c5da1e11915d3f28266168baaf55822f7e3fe999
+Subproject commit 432ca26686c11d396eed6a59499f93ce1bf2433c
--- next file ---
@@ -1 +1 @@
-Subproject commit 8a7d05615e5bc0a7fb961b4919c44f5221ee54da
+Subproject commit 38b9a76bc8b59ac862663807fc51c9b757337fd6
--- next file ---
@@ -1 +1 @@
-Subproject commit b4b3536839042a6743fc76f0d9ad2a812020aeaa
+Subproject commit d191a0cdd3b92648e0f1e53b13140a14677cc65b
--- next file ---
@@ -1 +1 @@
-Subproject commit f2c15ba5ee89ae9469a2cf60494977749901d764
+Subproject commit 580839d90aacd537f0293697096fa8355bc4e673
--- next file ---
@@ -1 +1 @@
-Subproject commit 6f4ba673ff9d4613e98415bc095347a6a0031e9c
+Subproject commit 6e25a3d0d3573eb42b2e2339f1219e969d1b3dee
--- next file ---
@@ -304,3 +304,10 @@ to customize the output:

Note that it is invalid to combine the `--json` argument with the `--color`
argument, and it is required to combine `--json` with `--error-format=json`.

## `@path`: load command-line flags from a path

If you specify `@path` on the command-line, then it will open `path` and read
command line options from it. These options are one per line; a blank line indicates
an empty option. The file can use Unix or Windows style line endings, and must be
encoded as UTF-8.
--- next file ---
@@ -208,7 +208,7 @@ error: missing documentation for a function

To fix the lint, add documentation to all items.

-## single-use-lifetime
+## single-use-lifetimes

This lint detects lifetimes that are only used once. Some example code that
triggers this lint:
--- next file ---
@@ -311,19 +311,6 @@ When `rustdoc` receives this flag, it will print an extra "Version (version)" in
the crate root's docs. You can use this flag to differentiate between different versions of your
library's documentation.

### `--linker`: control the linker used for documentation tests

Using this flag looks like this:

```bash
$ rustdoc --test src/lib.rs -Z unstable-options --linker foo
$ rustdoc --test README.md -Z unstable-options --linker foo
```

When `rustdoc` runs your documentation tests, it needs to compile and link the tests as executables
before running them. This flag can be used to change the linker used on these executables. It's
equivalent to passing `-C linker=foo` to `rustc`.

### `--sort-modules-by-appearance`: control how items on module pages are sorted

Using this flag looks like this:
--- next file ---
@@ -15,113 +15,7 @@ const buffer = fs.readFileSync(process.argv[2]);
Error.stackTraceLimit = 20;

let m = new WebAssembly.Module(buffer);

let memory = null;

function viewstruct(data, fields) {
return new Uint32Array(memory.buffer).subarray(data/4, data/4 + fields);
}

function copystr(a, b) {
let view = new Uint8Array(memory.buffer).subarray(a, a + b);
return String.fromCharCode.apply(null, view);
}

function syscall_write([fd, ptr, len]) {
let s = copystr(ptr, len);
switch (fd) {
case 1: process.stdout.write(s); break;
case 2: process.stderr.write(s); break;
}
}

function syscall_exit([code]) {
process.exit(code);
}

function syscall_args(params) {
let [ptr, len] = params;

// Calculate total required buffer size
let totalLen = -1;
for (let i = 2; i < process.argv.length; ++i) {
totalLen += Buffer.byteLength(process.argv[i]) + 1;
}
if (totalLen < 0) { totalLen = 0; }
params[2] = totalLen;

// If buffer is large enough, copy data
if (len >= totalLen) {
let view = new Uint8Array(memory.buffer);
for (let i = 2; i < process.argv.length; ++i) {
let value = process.argv[i];
Buffer.from(value).copy(view, ptr);
ptr += Buffer.byteLength(process.argv[i]) + 1;
}
}
}

function syscall_getenv(params) {
let [keyPtr, keyLen, valuePtr, valueLen] = params;

let key = copystr(keyPtr, keyLen);
let value = process.env[key];

if (value == null) {
params[4] = 0xFFFFFFFF;
} else {
let view = new Uint8Array(memory.buffer);
let totalLen = Buffer.byteLength(value);
params[4] = totalLen;
if (valueLen >= totalLen) {
Buffer.from(value).copy(view, valuePtr);
}
}
}

function syscall_time(params) {
let t = Date.now();
let secs = Math.floor(t / 1000);
let millis = t % 1000;
params[1] = Math.floor(secs / 0x100000000);
params[2] = secs % 0x100000000;
params[3] = Math.floor(millis * 1000000);
}

let imports = {};
imports.env = {
// These are generated by LLVM itself for various intrinsic calls. Hopefully
// one day this is not necessary and something will automatically do this.
fmod: function(x, y) { return x % y; },
exp2: function(x) { return Math.pow(2, x); },
exp2f: function(x) { return Math.pow(2, x); },
ldexp: function(x, y) { return x * Math.pow(2, y); },
ldexpf: function(x, y) { return x * Math.pow(2, y); },
sin: Math.sin,
sinf: Math.sin,
cos: Math.cos,
cosf: Math.cos,
log: Math.log,
log2: Math.log2,
log10: Math.log10,
log10f: Math.log10,

rust_wasm_syscall: function(index, data) {
switch (index) {
case 1: syscall_write(viewstruct(data, 3)); return true;
case 2: syscall_exit(viewstruct(data, 1)); return true;
case 3: syscall_args(viewstruct(data, 3)); return true;
case 4: syscall_getenv(viewstruct(data, 5)); return true;
case 6: syscall_time(viewstruct(data, 4)); return true;
default:
console.log("Unsupported syscall: " + index);
return false;
}
}
};

-let instance = new WebAssembly.Instance(m, imports);
-memory = instance.exports.memory;
+let instance = new WebAssembly.Instance(m, {});
try {
instance.exports.main();
} catch (e) {
@@ -276,7 +276,7 @@ impl<T> LinkedList<T> {
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new() -> Self {
    pub const fn new() -> Self {
        LinkedList {
            head: None,
            tail: None,
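The change above makes `LinkedList::new` a `const fn`, so an empty list can be constructed in a constant context. A minimal sketch (assuming a toolchain where this const-ness is available; it was stabilized later):

```rust
use std::collections::LinkedList;

// With `new` as a `const fn`, the empty list can be built at compile time,
// for example as the initializer of a `static`.
static EMPTY: LinkedList<i32> = LinkedList::new();

fn main() {
    assert!(EMPTY.is_empty());
    assert_eq!(EMPTY.len(), 0);
}
```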
@@ -1810,7 +1810,7 @@ impl<T> VecDeque<T> {
        other
    }

    /// Moves all the elements of `other` into `Self`, leaving `other` empty.
    /// Moves all the elements of `other` into `self`, leaving `other` empty.
    ///
    /// # Panics
    ///
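For context, `append` drains the other deque into `self`. A small usage sketch using only stable `std` (not part of the diff itself):

```rust
use std::collections::VecDeque;

fn main() {
    let mut front: VecDeque<i32> = (1..4).collect(); // [1, 2, 3]
    let mut back: VecDeque<i32> = (4..7).collect();  // [4, 5, 6]

    // All elements of `back` are moved into `front`, leaving `back` empty.
    front.append(&mut back);

    assert_eq!(front, [1, 2, 3, 4, 5, 6]);
    assert!(back.is_empty());
}
```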
@@ -1847,7 +1847,7 @@ impl<T> VecDeque<T> {
    ///
    /// let mut buf = VecDeque::new();
    /// buf.extend(1..5);
    /// buf.retain(|&x| x%2 == 0);
    /// buf.retain(|&x| x % 2 == 0);
    /// assert_eq!(buf, [2, 4]);
    /// ```
    ///
@@ -291,6 +291,7 @@ use crate::raw_vec::RawVec;
/// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve
/// [owned slice]: ../../std/boxed/struct.Box.html
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(all(not(bootstrap), not(test)), rustc_diagnostic_item = "vec_type")]
pub struct Vec<T> {
    buf: RawVec<T>,
    len: usize,
@@ -1012,8 +1012,10 @@ mod impls {
        impl Ord for $t {
            #[inline]
            fn cmp(&self, other: &$t) -> Ordering {
                if *self == *other { Equal }
                else if *self < *other { Less }
                // The order here is important to generate more optimal assembly.
                // See <https://github.com/rust-lang/rust/issues/63758> for more info.
                if *self < *other { Less }
                else if *self == *other { Equal }
                else { Greater }
            }
        }
@@ -104,22 +104,17 @@ pub const fn identity<T>(x: T) -> T { x }
/// If you need to do a costly conversion it is better to implement [`From`] with type
/// `&T` or write a custom function.
///
/// `AsRef` has the same signature as [`Borrow`], but `Borrow` is different in few aspects:
/// `AsRef` has the same signature as [`Borrow`], but [`Borrow`] is different in few aspects:
///
/// - Unlike `AsRef`, `Borrow` has a blanket impl for any `T`, and can be used to accept either
/// - Unlike `AsRef`, [`Borrow`] has a blanket impl for any `T`, and can be used to accept either
/// a reference or a value.
/// - `Borrow` also requires that `Hash`, `Eq` and `Ord` for borrowed value are
/// - [`Borrow`] also requires that [`Hash`], [`Eq`] and [`Ord`] for borrowed value are
/// equivalent to those of the owned value. For this reason, if you want to
/// borrow only a single field of a struct you can implement `AsRef`, but not `Borrow`.
///
/// [`Borrow`]: ../../std/borrow/trait.Borrow.html
/// borrow only a single field of a struct you can implement `AsRef`, but not [`Borrow`].
///
/// **Note: This trait must not fail**. If the conversion can fail, use a
/// dedicated method which returns an [`Option<T>`] or a [`Result<T, E>`].
///
/// [`Option<T>`]: ../../std/option/enum.Option.html
/// [`Result<T, E>`]: ../../std/result/enum.Result.html
///
/// # Generic Implementations
///
/// - `AsRef` auto-dereferences if the inner type is a reference or a mutable
@@ -132,9 +127,16 @@ pub const fn identity<T>(x: T) -> T { x }
/// converted to the specified type `T`.
///
/// For example: By creating a generic function that takes an `AsRef<str>` we express that we
/// want to accept all references that can be converted to `&str` as an argument.
/// Since both [`String`] and `&str` implement `AsRef<str>` we can accept both as input argument.
/// want to accept all references that can be converted to [`&str`] as an argument.
/// Since both [`String`] and [`&str`] implement `AsRef<str>` we can accept both as input argument.
///
/// [`Option<T>`]: ../../std/option/enum.Option.html
/// [`Result<T, E>`]: ../../std/result/enum.Result.html
/// [`Borrow`]: ../../std/borrow/trait.Borrow.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Ord`]: ../../std/cmp/trait.Ord.html
/// [`&str`]: ../../std/primitive.str.html
/// [`String`]: ../../std/string/struct.String.html
///
/// ```
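The pattern this doc describes — one generic function that accepts anything convertible to `&str` — looks roughly like the following sketch (plain stable `std`, not taken from the diff):

```rust
// Accepts `&str`, `String`, `&String`, `Cow<str>`, ... anything `AsRef<str>`.
fn word_count<S: AsRef<str>>(text: S) -> usize {
    text.as_ref().split_whitespace().count()
}

fn main() {
    let owned = String::from("to be or not to be");
    assert_eq!(word_count("to be or not to be"), 6);
    assert_eq!(word_count(&owned), 6); // `AsRef` auto-derefs through the reference
    assert_eq!(word_count(owned), 6);
}
```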
@@ -518,7 +518,8 @@ impl Display for Arguments<'_> {
    label="`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`",
)]
#[doc(alias = "{:?}")]
#[lang = "debug_trait"]
#[cfg_attr(boostrap_stdarch_ignore_this, lang = "debug_trait")]
#[cfg_attr(not(boostrap_stdarch_ignore_this), rustc_diagnostic_item = "debug_trait")]
pub trait Debug {
    /// Formats the value using the given formatter.
    ///
@@ -104,11 +104,19 @@ pub fn spin_loop() {
    }
}

/// A function that is opaque to the optimizer, to allow benchmarks to
/// pretend to use outputs to assist in avoiding dead-code
/// elimination.
/// An identity function that *__hints__* to the compiler to be maximally pessimistic about what
/// `black_box` could do.
///
/// This function is a no-op, and does not even read from `dummy`.
/// [`std::convert::identity`]: https://doc.rust-lang.org/core/convert/fn.identity.html
///
/// Unlike [`std::convert::identity`], a Rust compiler is encouraged to assume that `black_box` can
/// use `x` in any possible valid way that Rust code is allowed to without introducing undefined
/// behavior in the calling code. This property makes `black_box` useful for writing code in which
/// certain optimizations are not desired, such as benchmarks.
///
/// Note however, that `black_box` is only (and can only be) provided on a "best-effort" basis. The
/// extent to which it can block optimisations may vary depending upon the platform and code-gen
/// backend used. Programs cannot rely on `black_box` for *correctness* in any way.
#[inline]
#[unstable(feature = "test", issue = "50297")]
#[allow(unreachable_code)] // this makes #[cfg] a bit easier below.
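A minimal sketch of the benchmarking use case the new docs describe (assumes a nightly toolchain, since `black_box` is gated behind the `test` feature at this point; it was stabilized later):

```rust
#![feature(test)]

use std::hint::black_box;
use std::time::Instant;

fn sum_of_squares(n: u64) -> u64 {
    (0..n).map(|i| i * i).sum()
}

fn main() {
    let start = Instant::now();
    for _ in 0..10_000 {
        // `black_box` on the input keeps the compiler from constant-folding the
        // call; `black_box` on the output keeps it from discarding the result.
        black_box(sum_of_squares(black_box(1_000)));
    }
    println!("elapsed: {:?}", start.elapsed());
}
```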
@@ -1309,7 +1309,7 @@ impl<I> DoubleEndedIterator for Peekable<I> where I: DoubleEndedIterator {
        Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
    {
        match self.peeked.take() {
            Some(None) => return Try::from_ok(init),
            Some(None) => Try::from_ok(init),
            Some(Some(v)) => match self.iter.try_rfold(init, &mut f).into_result() {
                Ok(acc) => f(acc, v),
                Err(e) => {
@@ -1326,7 +1326,7 @@ impl<I> DoubleEndedIterator for Peekable<I> where I: DoubleEndedIterator {
        where Fold: FnMut(Acc, Self::Item) -> Acc,
    {
        match self.peeked {
            Some(None) => return init,
            Some(None) => init,
            Some(Some(v)) => {
                let acc = self.iter.rfold(init, &mut fold);
                fold(acc, v)
@@ -734,7 +734,6 @@ pub(crate) mod builtin {
    #[allow_internal_unstable(fmt_internals)]
    #[rustc_builtin_macro]
    #[macro_export]
    #[rustc_macro_transparency = "opaque"]
    macro_rules! format_args {
        ($fmt:expr) => ({ /* compiler built-in */ });
        ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
@@ -747,7 +746,6 @@ pub(crate) mod builtin {
    #[allow_internal_unstable(fmt_internals)]
    #[rustc_builtin_macro]
    #[macro_export]
    #[rustc_macro_transparency = "opaque"]
    macro_rules! format_args_nl {
        ($fmt:expr) => ({ /* compiler built-in */ });
        ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
@@ -1235,7 +1233,6 @@ pub(crate) mod builtin {
    #[stable(feature = "rust1", since = "1.0.0")]
    #[allow_internal_unstable(test, rustc_attrs)]
    #[rustc_builtin_macro]
    #[rustc_macro_transparency = "semitransparent"]
    pub macro test($item:item) { /* compiler built-in */ }

    /// Attribute macro applied to a function to turn it into a benchmark test.
@@ -1243,7 +1240,6 @@ pub(crate) mod builtin {
                reason = "`bench` is a part of custom test frameworks which are unstable")]
    #[allow_internal_unstable(test, rustc_attrs)]
    #[rustc_builtin_macro]
    #[rustc_macro_transparency = "semitransparent"]
    pub macro bench($item:item) { /* compiler built-in */ }

    /// An implementation detail of the `#[test]` and `#[bench]` macros.
@@ -1251,26 +1247,22 @@ pub(crate) mod builtin {
                reason = "custom test frameworks are an unstable feature")]
    #[allow_internal_unstable(test, rustc_attrs)]
    #[rustc_builtin_macro]
    #[rustc_macro_transparency = "semitransparent"]
    pub macro test_case($item:item) { /* compiler built-in */ }

    /// Attribute macro applied to a static to register it as a global allocator.
    #[stable(feature = "global_allocator", since = "1.28.0")]
    #[allow_internal_unstable(rustc_attrs)]
    #[rustc_builtin_macro]
    #[rustc_macro_transparency = "semitransparent"]
    pub macro global_allocator($item:item) { /* compiler built-in */ }

    /// Unstable implementation detail of the `rustc` compiler, do not use.
    #[rustc_builtin_macro]
    #[cfg_attr(boostrap_stdarch_ignore_this, rustc_macro_transparency = "semitransparent")]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[allow_internal_unstable(core_intrinsics, libstd_sys_internals)]
    pub macro RustcDecodable($item:item) { /* compiler built-in */ }

    /// Unstable implementation detail of the `rustc` compiler, do not use.
    #[rustc_builtin_macro]
    #[cfg_attr(boostrap_stdarch_ignore_this, rustc_macro_transparency = "semitransparent")]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[allow_internal_unstable(core_intrinsics)]
    pub macro RustcEncodable($item:item) { /* compiler built-in */ }
|
@@ -315,7 +315,7 @@ impl f32 {
    /// use std::f32;
    ///
    /// let x = 2.0_f32;
    /// let abs_difference = (x.recip() - (1.0/x)).abs();
    /// let abs_difference = (x.recip() - (1.0 / x)).abs();
    ///
    /// assert!(abs_difference <= f32::EPSILON);
    /// ```
@@ -327,7 +327,7 @@ impl f64 {
    ///
    /// ```
    /// let x = 2.0_f64;
    /// let abs_difference = (x.recip() - (1.0/x)).abs();
    /// let abs_difference = (x.recip() - (1.0 / x)).abs();
    ///
    /// assert!(abs_difference < 1e-10);
    /// ```
@ -18,6 +18,8 @@ macro_rules! sh_impl_signed {
|
|||
}
|
||||
}
|
||||
}
|
||||
forward_ref_binop! { impl Shl, shl for Wrapping<$t>, $f,
|
||||
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
|
||||
|
||||
#[stable(feature = "op_assign_traits", since = "1.8.0")]
|
||||
impl ShlAssign<$f> for Wrapping<$t> {
|
||||
|
@ -41,6 +43,8 @@ macro_rules! sh_impl_signed {
|
|||
}
|
||||
}
|
||||
}
|
||||
forward_ref_binop! { impl Shr, shr for Wrapping<$t>, $f,
|
||||
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
|
||||
|
||||
#[stable(feature = "op_assign_traits", since = "1.8.0")]
|
||||
impl ShrAssign<$f> for Wrapping<$t> {
|
||||
|
@ -64,6 +68,8 @@ macro_rules! sh_impl_unsigned {
|
|||
Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32))
|
||||
}
|
||||
}
|
||||
forward_ref_binop! { impl Shl, shl for Wrapping<$t>, $f,
|
||||
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
|
||||
|
||||
#[stable(feature = "op_assign_traits", since = "1.8.0")]
|
||||
impl ShlAssign<$f> for Wrapping<$t> {
|
||||
|
@ -83,6 +89,8 @@ macro_rules! sh_impl_unsigned {
|
|||
Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32))
|
||||
}
|
||||
}
|
||||
forward_ref_binop! { impl Shr, shr for Wrapping<$t>, $f,
|
||||
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
|
||||
|
||||
#[stable(feature = "op_assign_traits", since = "1.8.0")]
|
||||
impl ShrAssign<$f> for Wrapping<$t> {
|
||||
|
|
|
@ -185,14 +185,6 @@ pub trait FnMut<Args> : FnOnce<Args> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ## Calling a by-value closure
|
||||
///
|
||||
/// ```
|
||||
/// let x = 5;
|
||||
/// let square_x = move || x * x;
|
||||
/// assert_eq!(square_x(), 25);
|
||||
/// ```
|
||||
///
|
||||
/// ## Using a `FnOnce` parameter
|
||||
///
|
||||
/// ```
|
||||
|
|
|
@ -3026,8 +3026,7 @@ macro_rules! len {
|
|||
if size == 0 {
|
||||
// This _cannot_ use `unchecked_sub` because we depend on wrapping
|
||||
// to represent the length of long ZST slice iterators.
|
||||
let diff = ($self.end as usize).wrapping_sub(start as usize);
|
||||
diff
|
||||
($self.end as usize).wrapping_sub(start as usize)
|
||||
} else {
|
||||
// We know that `start <= end`, so can do better than `offset_from`,
|
||||
// which needs to deal in signed. By setting appropriate flags here
|
||||
|
|
|
@ -2170,6 +2170,7 @@ impl str {
|
|||
#[inline(always)]
|
||||
#[rustc_const_unstable(feature="const_str_as_bytes")]
|
||||
pub const fn as_bytes(&self) -> &[u8] {
|
||||
#[repr(C)]
|
||||
union Slices<'a> {
|
||||
str: &'a str,
|
||||
slice: &'a [u8],
|
||||
|
@ -3557,7 +3558,7 @@ impl str {
|
|||
/// A string is a sequence of bytes. `start` in this context means the first
|
||||
/// position of that byte string; for a left-to-right language like English or
|
||||
/// Russian, this will be left side, and for right-to-left languages like
|
||||
/// like Arabic or Hebrew, this will be the right side.
|
||||
/// Arabic or Hebrew, this will be the right side.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -3594,7 +3595,7 @@ impl str {
|
|||
/// A string is a sequence of bytes. `end` in this context means the last
|
||||
/// position of that byte string; for a left-to-right language like English or
|
||||
/// Russian, this will be right side, and for right-to-left languages like
|
||||
/// like Arabic or Hebrew, this will be the left side.
|
||||
/// Arabic or Hebrew, this will be the left side.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -3761,7 +3762,7 @@ impl str {
|
|||
/// A string is a sequence of bytes. `start` in this context means the first
|
||||
/// position of that byte string; for a left-to-right language like English or
|
||||
/// Russian, this will be left side, and for right-to-left languages like
|
||||
/// like Arabic or Hebrew, this will be the right side.
|
||||
/// Arabic or Hebrew, this will be the right side.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -3800,7 +3801,7 @@ impl str {
|
|||
/// A string is a sequence of bytes. `end` in this context means the last
|
||||
/// position of that byte string; for a left-to-right language like English or
|
||||
/// Russian, this will be right side, and for right-to-left languages like
|
||||
/// like Arabic or Hebrew, this will be the left side.
|
||||
/// Arabic or Hebrew, this will be the left side.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
|
|
@ -6,3 +6,6 @@ edition = "2018"
|
|||
|
||||
[lib]
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
std = { path = "../libstd" }
|
||||
|
|
|
@ -19,12 +19,15 @@
|
|||
|
||||
#![feature(nll)]
|
||||
#![feature(staged_api)]
|
||||
#![feature(allow_internal_unstable)]
|
||||
#![feature(const_fn)]
|
||||
#![feature(decl_macro)]
|
||||
#![feature(extern_types)]
|
||||
#![feature(in_band_lifetimes)]
|
||||
#![feature(optin_builtin_traits)]
|
||||
#![feature(mem_take)]
|
||||
#![feature(non_exhaustive)]
|
||||
#![feature(rustc_attrs)]
|
||||
#![feature(specialization)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
@ -222,11 +225,10 @@ pub mod token_stream {
|
|||
///
|
||||
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
|
||||
/// To quote `$` itself, use `$$`.
|
||||
///
|
||||
/// This is a dummy macro, the actual implementation is in `quote::quote`.`
|
||||
#[unstable(feature = "proc_macro_quote", issue = "54722")]
|
||||
#[macro_export]
|
||||
macro_rules! quote { () => {} }
|
||||
#[allow_internal_unstable(proc_macro_def_site)]
|
||||
#[cfg_attr(not(bootstrap), rustc_builtin_macro)]
|
||||
pub macro quote ($($t:tt)*) { /* compiler built-in */ }
|
||||
|
||||
#[unstable(feature = "proc_macro_internals", issue = "27812")]
|
||||
#[doc(hidden)]
|
||||
|
|
|
@ -57,9 +57,9 @@ macro_rules! quote {
|
|||
}
|
||||
|
||||
/// Quote a `TokenStream` into a `TokenStream`.
|
||||
/// This is the actual `quote!()` proc macro.
|
||||
/// This is the actual implementation of the `quote!()` proc macro.
|
||||
///
|
||||
/// It is manually loaded in `CStore::load_macro_untracked`.
|
||||
/// It is loaded by the compiler in `register_builtin_macros`.
|
||||
#[unstable(feature = "proc_macro_quote", issue = "54722")]
|
||||
pub fn quote(stream: TokenStream) -> TokenStream {
|
||||
if stream.is_empty() {
|
||||
|
|
|
@ -25,6 +25,16 @@ macro_rules! arena_types {
|
|||
[] adt_def: rustc::ty::AdtDef,
|
||||
[] steal_mir: rustc::ty::steal::Steal<rustc::mir::Body<$tcx>>,
|
||||
[] mir: rustc::mir::Body<$tcx>,
|
||||
[] steal_promoted: rustc::ty::steal::Steal<
|
||||
rustc_data_structures::indexed_vec::IndexVec<
|
||||
rustc::mir::Promoted,
|
||||
rustc::mir::Body<$tcx>
|
||||
>
|
||||
>,
|
||||
[] promoted: rustc_data_structures::indexed_vec::IndexVec<
|
||||
rustc::mir::Promoted,
|
||||
rustc::mir::Body<$tcx>
|
||||
>,
|
||||
[] tables: rustc::ty::TypeckTables<$tcx>,
|
||||
[] const_allocs: rustc::mir::interpret::Allocation,
|
||||
[] vtable_method: Option<(
|
||||
|
@ -84,6 +94,10 @@ macro_rules! arena_types {
|
|||
rustc::hir::def_id::CrateNum
|
||||
>
|
||||
>,
|
||||
[few] diagnostic_items: rustc_data_structures::fx::FxHashMap<
|
||||
syntax::symbol::Symbol,
|
||||
rustc::hir::def_id::DefId,
|
||||
>,
|
||||
[few] resolve_lifetimes: rustc::middle::resolve_lifetime::ResolveLifetimes,
|
||||
[decode] generic_predicates: rustc::ty::GenericPredicates<'tcx>,
|
||||
[few] lint_levels: rustc::lint::LintLevelMap,
|
||||
|
|
|
@ -2231,7 +2231,7 @@ register_diagnostics! {
|
|||
E0495, // cannot infer an appropriate lifetime due to conflicting requirements
|
||||
E0566, // conflicting representation hints
|
||||
E0623, // lifetime mismatch where both parameters are anonymous regions
|
||||
E0628, // generators cannot have explicit arguments
|
||||
E0628, // generators cannot have explicit parameters
|
||||
E0631, // type mismatch in closure arguments
|
||||
E0637, // "'_" is not a valid lifetime bound
|
||||
E0657, // `impl Trait` can only capture lifetimes bound at the fn level
|
||||
|
@ -2239,7 +2239,7 @@ register_diagnostics! {
|
|||
E0688, // in-band lifetimes cannot be mixed with explicit lifetime binders
|
||||
E0697, // closures cannot be static
|
||||
E0707, // multiple elided lifetimes used in arguments of `async fn`
|
||||
E0708, // `async` non-`move` closures with arguments are not currently supported
|
||||
E0708, // `async` non-`move` closures with parameters are not currently supported
|
||||
E0709, // multiple different lifetimes used in arguments of `async fn`
|
||||
E0710, // an unknown tool name found in scoped lint
|
||||
E0711, // a feature has been declared with conflicting stability attributes
|
||||
|
|
|
@ -210,8 +210,8 @@ pub trait Visitor<'v> : Sized {
|
|||
}
|
||||
}
|
||||
|
||||
fn visit_arg(&mut self, arg: &'v Arg) {
|
||||
walk_arg(self, arg)
|
||||
fn visit_param(&mut self, param: &'v Param) {
|
||||
walk_param(self, param)
|
||||
}
|
||||
|
||||
/// Visits the top-level item and (optionally) nested items / impl items. See
|
||||
|
@ -400,7 +400,7 @@ pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod, mod_hir_id
|
|||
}
|
||||
|
||||
pub fn walk_body<'v, V: Visitor<'v>>(visitor: &mut V, body: &'v Body) {
|
||||
walk_list!(visitor, visit_arg, &body.arguments);
|
||||
walk_list!(visitor, visit_param, &body.params);
|
||||
visitor.visit_expr(&body.value);
|
||||
}
|
||||
|
||||
|
@ -454,10 +454,10 @@ pub fn walk_trait_ref<'v, V>(visitor: &mut V, trait_ref: &'v TraitRef)
|
|||
visitor.visit_path(&trait_ref.path, trait_ref.hir_ref_id)
|
||||
}
|
||||
|
||||
pub fn walk_arg<'v, V: Visitor<'v>>(visitor: &mut V, arg: &'v Arg) {
|
||||
visitor.visit_id(arg.hir_id);
|
||||
visitor.visit_pat(&arg.pat);
|
||||
walk_list!(visitor, visit_attribute, &arg.attrs);
|
||||
pub fn walk_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Param) {
|
||||
visitor.visit_id(param.hir_id);
|
||||
visitor.visit_pat(&param.pat);
|
||||
walk_list!(visitor, visit_attribute, &param.attrs);
|
||||
}
|
||||
|
||||
pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
|
||||
|
|
|
@ -510,12 +510,12 @@ impl<'a> LoweringContext<'a> {
|
|||
&f.generic_params
|
||||
);
|
||||
// Mirrors visit::walk_fn_decl
|
||||
for argument in &f.decl.inputs {
|
||||
for parameter in &f.decl.inputs {
|
||||
// We don't lower the ids of argument patterns
|
||||
self.with_hir_id_owner(None, |this| {
|
||||
this.visit_pat(&argument.pat);
|
||||
this.visit_pat(&parameter.pat);
|
||||
});
|
||||
self.visit_ty(&argument.ty)
|
||||
self.visit_ty(&parameter.ty)
|
||||
}
|
||||
self.visit_fn_ret_ty(&f.decl.output)
|
||||
}
|
||||
|
@ -735,7 +735,7 @@ impl<'a> LoweringContext<'a> {
|
|||
///
|
||||
/// Presuming that in-band lifetimes are enabled, then
|
||||
/// `self.anonymous_lifetime_mode` will be updated to match the
|
||||
/// argument while `f` is running (and restored afterwards).
|
||||
/// parameter while `f` is running (and restored afterwards).
|
||||
fn collect_in_band_defs<T, F>(
|
||||
&mut self,
|
||||
parent_id: DefId,
|
||||
|
@ -880,7 +880,7 @@ impl<'a> LoweringContext<'a> {
|
|||
///
|
||||
/// Presuming that in-band lifetimes are enabled, then
|
||||
/// `self.anonymous_lifetime_mode` will be updated to match the
|
||||
/// argument while `f` is running (and restored afterwards).
|
||||
/// parameter while `f` is running (and restored afterwards).
|
||||
fn add_in_band_defs<F, T>(
|
||||
&mut self,
|
||||
generics: &Generics,
|
||||
|
@ -1080,7 +1080,7 @@ impl<'a> LoweringContext<'a> {
|
|||
ImplTraitContext::Disallowed(_) if self.is_in_dyn_type =>
|
||||
(true, ImplTraitContext::OpaqueTy(None)),
|
||||
|
||||
// We are in the argument position, but not within a dyn type:
|
||||
// We are in the parameter position, but not within a dyn type:
|
||||
//
|
||||
// fn foo(x: impl Iterator<Item: Debug>)
|
||||
//
|
||||
|
@ -1204,7 +1204,7 @@ impl<'a> LoweringContext<'a> {
|
|||
unsafety: this.lower_unsafety(f.unsafety),
|
||||
abi: f.abi,
|
||||
decl: this.lower_fn_decl(&f.decl, None, false, None),
|
||||
arg_names: this.lower_fn_args_to_names(&f.decl),
|
||||
param_names: this.lower_fn_params_to_names(&f.decl),
|
||||
}))
|
||||
},
|
||||
)
|
||||
|
@ -2093,12 +2093,12 @@ impl<'a> LoweringContext<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Ident> {
|
||||
fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Ident> {
|
||||
decl.inputs
|
||||
.iter()
|
||||
.map(|arg| match arg.pat.node {
|
||||
.map(|param| match param.pat.node {
|
||||
PatKind::Ident(_, ident, _) => ident,
|
||||
_ => Ident::new(kw::Invalid, arg.pat.span),
|
||||
_ => Ident::new(kw::Invalid, param.pat.span),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
@ -2136,11 +2136,11 @@ impl<'a> LoweringContext<'a> {
|
|||
let inputs = self.with_anonymous_lifetime_mode(lt_mode, |this| {
|
||||
decl.inputs
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
.map(|param| {
|
||||
if let Some((_, ibty)) = &mut in_band_ty_params {
|
||||
this.lower_ty_direct(&arg.ty, ImplTraitContext::Universal(ibty))
|
||||
this.lower_ty_direct(&param.ty, ImplTraitContext::Universal(ibty))
|
||||
} else {
|
||||
this.lower_ty_direct(&arg.ty, ImplTraitContext::disallowed())
|
||||
this.lower_ty_direct(¶m.ty, ImplTraitContext::disallowed())
|
||||
}
|
||||
})
|
||||
.collect::<HirVec<_>>()
|
||||
|
@ -2205,7 +2205,7 @@ impl<'a> LoweringContext<'a> {
|
|||
//
|
||||
// type OpaqueTy<generics_from_parent_fn> = impl Future<Output = T>;
|
||||
//
|
||||
// `inputs`: lowered types of arguments to the function (used to collect lifetimes)
|
||||
// `inputs`: lowered types of parameters to the function (used to collect lifetimes)
|
||||
// `output`: unlowered output type (`T` in `-> T`)
|
||||
// `fn_def_id`: `DefId` of the parent function (used to create child impl trait definition)
|
||||
// `opaque_ty_node_id`: `NodeId` of the opaque `impl Trait` type that should be created
|
||||
|
|
|
@ -724,7 +724,7 @@ impl LoweringContext<'_> {
|
|||
self.sess,
|
||||
fn_decl_span,
|
||||
E0628,
|
||||
"generators cannot have explicit arguments"
|
||||
"generators cannot have explicit parameters"
|
||||
);
|
||||
self.sess.abort_if_errors();
|
||||
}
|
||||
|
@ -775,7 +775,7 @@ impl LoweringContext<'_> {
|
|||
this.sess,
|
||||
fn_decl_span,
|
||||
E0708,
|
||||
"`async` non-`move` closures with arguments are not currently supported",
|
||||
"`async` non-`move` closures with parameters are not currently supported",
|
||||
)
|
||||
.help(
|
||||
"consider using `let` statements to manually capture \
|
||||
|
|
|
@ -720,7 +720,7 @@ impl LoweringContext<'_> {
|
|||
(
|
||||
// Disallow impl Trait in foreign items
|
||||
this.lower_fn_decl(fdec, None, false, None),
|
||||
this.lower_fn_args_to_names(fdec),
|
||||
this.lower_fn_params_to_names(fdec),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
@ -827,7 +827,7 @@ impl LoweringContext<'_> {
|
|||
),
|
||||
),
|
||||
TraitItemKind::Method(ref sig, None) => {
|
||||
let names = self.lower_fn_args_to_names(&sig.decl);
|
||||
let names = self.lower_fn_params_to_names(&sig.decl);
|
||||
let (generics, sig) = self.lower_method_sig(
|
||||
&i.generics,
|
||||
sig,
|
||||
|
@ -1028,10 +1028,10 @@ impl LoweringContext<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn record_body(&mut self, arguments: HirVec<hir::Arg>, value: hir::Expr) -> hir::BodyId {
|
||||
fn record_body(&mut self, params: HirVec<hir::Param>, value: hir::Expr) -> hir::BodyId {
|
||||
let body = hir::Body {
|
||||
generator_kind: self.generator_kind,
|
||||
arguments,
|
||||
params,
|
||||
value,
|
||||
};
|
||||
let id = body.id();
|
||||
|
@ -1041,21 +1041,21 @@ impl LoweringContext<'_> {
|
|||
|
||||
fn lower_body(
|
||||
&mut self,
|
||||
f: impl FnOnce(&mut LoweringContext<'_>) -> (HirVec<hir::Arg>, hir::Expr),
|
||||
f: impl FnOnce(&mut LoweringContext<'_>) -> (HirVec<hir::Param>, hir::Expr),
|
||||
) -> hir::BodyId {
|
||||
let prev_gen_kind = self.generator_kind.take();
|
||||
let (arguments, result) = f(self);
|
||||
let body_id = self.record_body(arguments, result);
|
||||
let (parameters, result) = f(self);
|
||||
let body_id = self.record_body(parameters, result);
|
||||
self.generator_kind = prev_gen_kind;
|
||||
body_id
|
||||
}
|
||||
|
||||
fn lower_arg(&mut self, arg: &Arg) -> hir::Arg {
|
||||
hir::Arg {
|
||||
attrs: self.lower_attrs(&arg.attrs),
|
||||
hir_id: self.lower_node_id(arg.id),
|
||||
pat: self.lower_pat(&arg.pat),
|
||||
span: arg.span,
|
||||
fn lower_param(&mut self, param: &Param) -> hir::Param {
|
||||
hir::Param {
|
||||
attrs: self.lower_attrs(&param.attrs),
|
||||
hir_id: self.lower_node_id(param.id),
|
||||
pat: self.lower_pat(&param.pat),
|
||||
span: param.span,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1065,7 +1065,7 @@ impl LoweringContext<'_> {
|
|||
body: impl FnOnce(&mut LoweringContext<'_>) -> hir::Expr,
|
||||
) -> hir::BodyId {
|
||||
self.lower_body(|this| (
|
||||
decl.inputs.iter().map(|x| this.lower_arg(x)).collect(),
|
||||
decl.inputs.iter().map(|x| this.lower_param(x)).collect(),
|
||||
body(this),
|
||||
))
|
||||
}
|
||||
|
@ -1093,10 +1093,10 @@ impl LoweringContext<'_> {
|
|||
};
|
||||
|
||||
self.lower_body(|this| {
|
||||
let mut arguments: Vec<hir::Arg> = Vec::new();
|
||||
let mut parameters: Vec<hir::Param> = Vec::new();
|
||||
let mut statements: Vec<hir::Stmt> = Vec::new();
|
||||
|
||||
// Async function arguments are lowered into the closure body so that they are
|
||||
// Async function parameters are lowered into the closure body so that they are
|
||||
// captured and so that the drop order matches the equivalent non-async functions.
|
||||
//
|
||||
// from:
|
||||
|
@ -1121,13 +1121,13 @@ impl LoweringContext<'_> {
|
|||
//
|
||||
// If `<pattern>` is a simple ident, then it is lowered to a single
|
||||
// `let <pattern> = <pattern>;` statement as an optimization.
|
||||
for (index, argument) in decl.inputs.iter().enumerate() {
|
||||
let argument = this.lower_arg(argument);
|
||||
let span = argument.pat.span;
|
||||
for (index, parameter) in decl.inputs.iter().enumerate() {
|
||||
let parameter = this.lower_param(parameter);
|
||||
let span = parameter.pat.span;
|
||||
|
||||
// Check if this is a binding pattern, if so, we can optimize and avoid adding a
|
||||
// `let <pat> = __argN;` statement. In this case, we do not rename the argument.
|
||||
let (ident, is_simple_argument) = match argument.pat.node {
|
||||
// `let <pat> = __argN;` statement. In this case, we do not rename the parameter.
|
||||
let (ident, is_simple_parameter) = match parameter.pat.node {
|
||||
hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, _) =>
|
||||
(ident, true),
|
||||
_ => {
|
||||
|
@ -1142,32 +1142,32 @@ impl LoweringContext<'_> {
|
|||
let desugared_span =
|
||||
this.mark_span_with_reason(DesugaringKind::Async, span, None);
|
||||
|
||||
// Construct an argument representing `__argN: <ty>` to replace the argument of the
|
||||
// Construct a parameter representing `__argN: <ty>` to replace the parameter of the
|
||||
// async function.
|
||||
//
|
||||
// If this is the simple case, this argument will end up being the same as the
|
||||
// original argument, but with a different pattern id.
|
||||
// If this is the simple case, this parameter will end up being the same as the
|
||||
// original parameter, but with a different pattern id.
|
||||
let mut stmt_attrs = ThinVec::new();
|
||||
stmt_attrs.extend(argument.attrs.iter().cloned());
|
||||
let (new_argument_pat, new_argument_id) = this.pat_ident(desugared_span, ident);
|
||||
let new_argument = hir::Arg {
|
||||
attrs: argument.attrs,
|
||||
hir_id: argument.hir_id,
|
||||
pat: new_argument_pat,
|
||||
span: argument.span,
|
||||
stmt_attrs.extend(parameter.attrs.iter().cloned());
|
||||
let (new_parameter_pat, new_parameter_id) = this.pat_ident(desugared_span, ident);
|
||||
let new_parameter = hir::Param {
|
||||
attrs: parameter.attrs,
|
||||
hir_id: parameter.hir_id,
|
||||
pat: new_parameter_pat,
|
||||
span: parameter.span,
|
||||
};
|
||||
|
||||
|
||||
if is_simple_argument {
|
||||
if is_simple_parameter {
|
||||
// If this is the simple case, then we only insert one statement that is
|
||||
// `let <pat> = <pat>;`. We re-use the original argument's pattern so that
|
||||
// `HirId`s are densely assigned.
|
||||
let expr = this.expr_ident(desugared_span, ident, new_argument_id);
|
||||
let expr = this.expr_ident(desugared_span, ident, new_parameter_id);
|
||||
let stmt = this.stmt_let_pat(
|
||||
stmt_attrs,
|
||||
desugared_span,
|
||||
Some(P(expr)),
|
||||
argument.pat,
|
||||
parameter.pat,
|
||||
hir::LocalSource::AsyncFn
|
||||
);
|
||||
statements.push(stmt);
|
||||
|
@ -1179,7 +1179,7 @@ impl LoweringContext<'_> {
|
|||
// let <pat> = __argN;
|
||||
// ```
|
||||
//
|
||||
// The first statement moves the argument into the closure and thus ensures
|
||||
// The first statement moves the parameter into the closure and thus ensures
|
||||
// that the drop order is correct.
|
||||
//
|
||||
// The second statement creates the bindings that the user wrote.
|
||||
|
@ -1189,7 +1189,7 @@ impl LoweringContext<'_> {
|
|||
// statement.
|
||||
let (move_pat, move_id) = this.pat_ident_binding_mode(
|
||||
desugared_span, ident, hir::BindingAnnotation::Mutable);
|
||||
let move_expr = this.expr_ident(desugared_span, ident, new_argument_id);
|
||||
let move_expr = this.expr_ident(desugared_span, ident, new_parameter_id);
|
||||
let move_stmt = this.stmt_let_pat(
|
||||
ThinVec::new(),
|
||||
desugared_span,
|
||||
|
@ -1199,13 +1199,13 @@ impl LoweringContext<'_> {
|
|||
);
|
||||
|
||||
// Construct the `let <pat> = __argN;` statement. We re-use the original
|
||||
// argument's pattern so that `HirId`s are densely assigned.
|
||||
// parameter's pattern so that `HirId`s are densely assigned.
|
||||
let pattern_expr = this.expr_ident(desugared_span, ident, move_id);
|
||||
let pattern_stmt = this.stmt_let_pat(
|
||||
stmt_attrs,
|
||||
desugared_span,
|
||||
Some(P(pattern_expr)),
|
||||
argument.pat,
|
||||
parameter.pat,
|
||||
hir::LocalSource::AsyncFn
|
||||
);
|
||||
|
||||
|
@ -1213,7 +1213,7 @@ impl LoweringContext<'_> {
|
|||
statements.push(pattern_stmt);
|
||||
};
|
||||
|
||||
arguments.push(new_argument);
|
||||
parameters.push(new_parameter);
|
||||
}
|
||||
|
||||
let async_expr = this.make_async_expr(
|
||||
|
@ -1222,7 +1222,7 @@ impl LoweringContext<'_> {
|
|||
let body = this.lower_block_with_stmts(body, false, statements);
|
||||
this.expr_block(body, ThinVec::new())
|
||||
});
|
||||
(HirVec::from(arguments), this.expr(body.span, async_expr, ThinVec::new()))
|
||||
(HirVec::from(parameters), this.expr(body.span, async_expr, ThinVec::new()))
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -363,11 +363,11 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
|
|||
self.currently_in_body = prev_in_body;
|
||||
}
|
||||
|
||||
fn visit_arg(&mut self, arg: &'hir Arg) {
|
||||
let node = Node::Arg(arg);
|
||||
self.insert(arg.pat.span, arg.hir_id, node);
|
||||
self.with_parent(arg.hir_id, |this| {
|
||||
intravisit::walk_arg(this, arg);
|
||||
fn visit_param(&mut self, param: &'hir Param) {
|
||||
let node = Node::Param(param);
|
||||
self.insert(param.pat.span, param.hir_id, node);
|
||||
self.with_parent(param.hir_id, |this| {
|
||||
intravisit::walk_param(this, param);
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -154,7 +154,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
|
|||
});
|
||||
}
|
||||
|
||||
fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
|
||||
fn visit_variant(&mut self, v: &'a Variant) {
|
||||
let def = self.create_def(v.id,
|
||||
DefPathData::TypeNs(v.ident.as_interned_str()),
|
||||
v.span);
|
||||
|
@ -162,12 +162,11 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
|
|||
if let Some(ctor_hir_id) = v.data.ctor_id() {
|
||||
this.create_def(ctor_hir_id, DefPathData::Ctor, v.span);
|
||||
}
|
||||
visit::walk_variant(this, v, g, item_id)
|
||||
visit::walk_variant(this, v)
|
||||
});
|
||||
}
|
||||
|
||||
fn visit_variant_data(&mut self, data: &'a VariantData, _: Ident,
|
||||
_: &'a Generics, _: NodeId, _: Span) {
|
||||
fn visit_variant_data(&mut self, data: &'a VariantData) {
|
||||
for (index, field) in data.fields().iter().enumerate() {
|
||||
let name = field.ident.map(|ident| ident.name)
|
||||
.unwrap_or_else(|| sym::integer(index));
|
||||
|
|
|
@ -360,7 +360,7 @@ impl<'hir> Map<'hir> {
|
|||
Node::Pat(_) |
|
||||
Node::Binding(_) |
|
||||
Node::Local(_) |
|
||||
Node::Arg(_) |
|
||||
Node::Param(_) |
|
||||
Node::Arm(_) |
|
||||
Node::Lifetime(_) |
|
||||
Node::Visibility(_) |
|
||||
|
@ -514,8 +514,7 @@ impl<'hir> Map<'hir> {
|
|||
&self.forest.krate.attrs
|
||||
}
|
||||
|
||||
pub fn get_module(&self, module: DefId) -> (&'hir Mod, Span, HirId)
|
||||
{
|
||||
pub fn get_module(&self, module: DefId) -> (&'hir Mod, Span, HirId) {
|
||||
let hir_id = self.as_local_hir_id(module).unwrap();
|
||||
self.read(hir_id);
|
||||
match self.find_entry(hir_id).unwrap().node {
|
||||
|
@ -525,7 +524,7 @@ impl<'hir> Map<'hir> {
|
|||
..
|
||||
}) => (m, span, hir_id),
|
||||
Node::Crate => (&self.forest.krate.module, self.forest.krate.span, hir_id),
|
||||
_ => panic!("not a module")
|
||||
node => panic!("not a module: {:?}", node),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -679,6 +678,16 @@ impl<'hir> Map<'hir> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Wether `hir_id` corresponds to a `mod` or a crate.
|
||||
pub fn is_hir_id_module(&self, hir_id: HirId) -> bool {
|
||||
match self.lookup(hir_id) {
|
||||
Some(Entry { node: Node::Item(Item { node: ItemKind::Mod(_), .. }), .. }) |
|
||||
Some(Entry { node: Node::Crate, .. }) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// If there is some error when walking the parents (e.g., a node does not
|
||||
/// have a parent in the map or a node can't be found), then we return the
|
||||
/// last good `HirId` we found. Note that reaching the crate root (`id == 0`),
|
||||
|
@ -955,7 +964,7 @@ impl<'hir> Map<'hir> {
|
|||
pub fn attrs(&self, id: HirId) -> &'hir [ast::Attribute] {
|
||||
self.read(id); // reveals attributes on the node
|
||||
let attrs = match self.find_entry(id).map(|entry| entry.node) {
|
||||
Some(Node::Arg(a)) => Some(&a.attrs[..]),
|
||||
Some(Node::Param(a)) => Some(&a.attrs[..]),
|
||||
Some(Node::Local(l)) => Some(&l.attrs[..]),
|
||||
Some(Node::Item(i)) => Some(&i.attrs[..]),
|
||||
Some(Node::ForeignItem(fi)) => Some(&fi.attrs[..]),
|
||||
|
@ -1019,7 +1028,7 @@ impl<'hir> Map<'hir> {
|
|||
pub fn span(&self, hir_id: HirId) -> Span {
|
||||
self.read(hir_id); // reveals span from node
|
||||
match self.find_entry(hir_id).map(|entry| entry.node) {
|
||||
Some(Node::Arg(arg)) => arg.span,
|
||||
Some(Node::Param(param)) => param.span,
|
||||
Some(Node::Item(item)) => item.span,
|
||||
Some(Node::ForeignItem(foreign_item)) => foreign_item.span,
|
||||
Some(Node::TraitItem(trait_method)) => trait_method.span,
|
||||
|
@ -1214,7 +1223,7 @@ impl<'hir> print::PpAnn for Map<'hir> {
|
|||
Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)),
|
||||
Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)),
|
||||
Nested::Body(id) => state.print_expr(&self.body(id).value),
|
||||
Nested::BodyArgPat(id, i) => state.print_pat(&self.body(id).arguments[i].pat)
|
||||
Nested::BodyParamPat(id, i) => state.print_pat(&self.body(id).params[i].pat)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1222,7 +1231,7 @@ impl<'hir> print::PpAnn for Map<'hir> {
|
|||
impl<'a> print::State<'a> {
|
||||
pub fn print_node(&mut self, node: Node<'_>) {
|
||||
match node {
|
||||
Node::Arg(a) => self.print_arg(&a),
|
||||
Node::Param(a) => self.print_param(&a),
|
||||
Node::Item(a) => self.print_item(&a),
|
||||
Node::ForeignItem(a) => self.print_foreign_item(&a),
|
||||
Node::TraitItem(a) => self.print_trait_item(a),
|
||||
|
@ -1364,8 +1373,8 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId, include_id: bool) -> String {
|
|||
Some(Node::Pat(_)) => {
|
||||
format!("pat {}{}", map.hir_to_pretty_string(id), id_str)
|
||||
}
|
||||
Some(Node::Arg(_)) => {
|
||||
format!("arg {}{}", map.hir_to_pretty_string(id), id_str)
|
||||
Some(Node::Param(_)) => {
|
||||
format!("param {}{}", map.hir_to_pretty_string(id), id_str)
|
||||
}
|
||||
Some(Node::Arm(_)) => {
|
||||
format!("arm {}{}", map.hir_to_pretty_string(id), id_str)
|
||||
|
|
|
@ -1030,7 +1030,7 @@ pub enum Mutability {
|
|||
}
|
||||
|
||||
impl Mutability {
|
||||
/// Returns `MutMutable` only if both arguments are mutable.
|
||||
/// Returns `MutMutable` only if both `self` and `other` are mutable.
|
||||
pub fn and(self, other: Self) -> Self {
|
||||
match self {
|
||||
MutMutable => other,
|
||||
|
@ -1324,7 +1324,7 @@ pub struct BodyId {
|
|||
///
|
||||
/// Here, the `Body` associated with `foo()` would contain:
|
||||
///
|
||||
/// - an `arguments` array containing the `(x, y)` pattern
|
||||
/// - an `params` array containing the `(x, y)` pattern
|
||||
/// - a `value` containing the `x + y` expression (maybe wrapped in a block)
|
||||
/// - `generator_kind` would be `None`
|
||||
///
|
||||
|
@ -1332,7 +1332,7 @@ pub struct BodyId {
|
|||
/// map using `body_owner_def_id()`.
|
||||
#[derive(RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct Body {
|
||||
pub arguments: HirVec<Arg>,
|
||||
pub params: HirVec<Param>,
|
||||
pub value: Expr,
|
||||
pub generator_kind: Option<GeneratorKind>,
|
||||
}
|
||||
|
@ -1644,7 +1644,7 @@ pub enum LocalSource {
|
|||
/// A desugared `for _ in _ { .. }` loop.
|
||||
ForLoopDesugar,
|
||||
/// When lowering async functions, we create locals within the `async move` so that
|
||||
/// all arguments are dropped after the future is polled.
|
||||
/// all parameters are dropped after the future is polled.
|
||||
///
|
||||
/// ```ignore (pseudo-Rust)
|
||||
/// async fn foo(<pattern> @ x: Type) {
|
||||
|
@ -1940,7 +1940,7 @@ pub struct BareFnTy {
|
|||
pub abi: Abi,
|
||||
pub generic_params: HirVec<GenericParam>,
|
||||
pub decl: P<FnDecl>,
|
||||
pub arg_names: HirVec<Ident>,
|
||||
pub param_names: HirVec<Ident>,
|
||||
}
|
||||
|
||||
#[derive(RustcEncodable, RustcDecodable, Debug, HashStable)]
|
||||
|
@ -2027,9 +2027,9 @@ pub struct InlineAsm {
|
|||
pub dialect: AsmDialect,
|
||||
}
|
||||
|
||||
/// Represents an argument in a function header.
|
||||
/// Represents a parameter in a function header.
|
||||
#[derive(RustcEncodable, RustcDecodable, Debug, HashStable)]
|
||||
pub struct Arg {
|
||||
pub struct Param {
|
||||
pub attrs: HirVec<Attribute>,
|
||||
pub hir_id: HirId,
|
||||
pub pat: P<Pat>,
|
||||
|
@ -2039,9 +2039,9 @@ pub struct Arg {
|
|||
/// Represents the header (not the body) of a function declaration.
|
||||
#[derive(RustcEncodable, RustcDecodable, Debug, HashStable)]
|
||||
pub struct FnDecl {
|
||||
/// The types of the function's arguments.
|
||||
/// The types of the function's parameters.
|
||||
///
|
||||
/// Additional argument data is stored in the function's [body](Body::arguments).
|
||||
/// Additional argument data is stored in the function's [body](Body::parameters).
|
||||
pub inputs: HirVec<Ty>,
|
||||
pub output: FunctionRetTy,
|
||||
pub c_variadic: bool,
|
||||
|
@ -2721,7 +2721,7 @@ impl CodegenFnAttrs {
|
|||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum Node<'hir> {
|
||||
Arg(&'hir Arg),
|
||||
Param(&'hir Param),
|
||||
Item(&'hir Item),
|
||||
ForeignItem(&'hir ForeignItem),
|
||||
TraitItem(&'hir TraitItem),
|
||||
|
|
|
@ -33,7 +33,7 @@ pub enum Nested {
|
|||
TraitItem(hir::TraitItemId),
|
||||
ImplItem(hir::ImplItemId),
|
||||
Body(hir::BodyId),
|
||||
BodyArgPat(hir::BodyId, usize)
|
||||
BodyParamPat(hir::BodyId, usize)
|
||||
}
|
||||
|
||||
pub trait PpAnn {
|
||||
|
@ -62,7 +62,7 @@ impl PpAnn for hir::Crate {
|
|||
Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)),
|
||||
Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)),
|
||||
Nested::Body(id) => state.print_expr(&self.body(id).value),
|
||||
Nested::BodyArgPat(id, i) => state.print_pat(&self.body(id).arguments[i].pat)
|
||||
Nested::BodyParamPat(id, i) => state.print_pat(&self.body(id).params[i].pat)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -318,7 +318,7 @@ impl<'a> State<'a> {
|
|||
}
|
||||
hir::TyKind::BareFn(ref f) => {
|
||||
self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &f.generic_params,
|
||||
&f.arg_names[..]);
|
||||
&f.param_names[..]);
|
||||
}
|
||||
hir::TyKind::Def(..) => {},
|
||||
hir::TyKind::Path(ref qpath) => {
|
||||
|
@ -1290,7 +1290,7 @@ impl<'a> State<'a> {
|
|||
hir::ExprKind::Closure(capture_clause, ref decl, body, _fn_decl_span, _gen) => {
|
||||
self.print_capture_clause(capture_clause);
|
||||
|
||||
self.print_closure_args(&decl, body);
|
||||
self.print_closure_params(&decl, body);
|
||||
self.s.space();
|
||||
|
||||
// this is a bare expression
|
||||
|
@ -1775,7 +1775,7 @@ impl<'a> State<'a> {
|
|||
self.ann.post(self, AnnNode::Pat(pat))
|
||||
}
|
||||
|
||||
pub fn print_arg(&mut self, arg: &hir::Arg) {
|
||||
pub fn print_param(&mut self, arg: &hir::Param) {
|
||||
self.print_outer_attributes(&arg.attrs);
|
||||
self.print_pat(&arg.pat);
|
||||
}
|
||||
|
@ -1864,7 +1864,7 @@ impl<'a> State<'a> {
|
|||
s.s.word(":");
|
||||
s.s.space();
|
||||
} else if let Some(body_id) = body_id {
|
||||
s.ann.nested(s, Nested::BodyArgPat(body_id, i));
|
||||
s.ann.nested(s, Nested::BodyParamPat(body_id, i));
|
||||
s.s.word(":");
|
||||
s.s.space();
|
||||
}
|
||||
|
@ -1881,13 +1881,13 @@ impl<'a> State<'a> {
|
|||
self.print_where_clause(&generics.where_clause)
|
||||
}
|
||||
|
||||
fn print_closure_args(&mut self, decl: &hir::FnDecl, body_id: hir::BodyId) {
|
||||
fn print_closure_params(&mut self, decl: &hir::FnDecl, body_id: hir::BodyId) {
|
||||
self.s.word("|");
|
||||
let mut i = 0;
|
||||
self.commasep(Inconsistent, &decl.inputs, |s, ty| {
|
||||
s.ibox(INDENT_UNIT);
|
||||
|
||||
s.ann.nested(s, Nested::BodyArgPat(body_id, i));
|
||||
s.ann.nested(s, Nested::BodyParamPat(body_id, i));
|
||||
i += 1;
|
||||
|
||||
if let hir::TyKind::Infer = ty.node {
|
||||
|
|
|
@ -331,13 +331,13 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::Body {
|
|||
hcx: &mut StableHashingContext<'a>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::Body {
|
||||
arguments,
|
||||
params,
|
||||
value,
|
||||
generator_kind,
|
||||
} = self;
|
||||
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::Ignore, |hcx| {
|
||||
arguments.hash_stable(hcx, hasher);
|
||||
params.hash_stable(hcx, hasher);
|
||||
value.hash_stable(hcx, hasher);
|
||||
generator_kind.hash_stable(hcx, hasher);
|
||||
});
|
||||
|
|
|
@ -402,7 +402,6 @@ impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnData {
|
|||
parent -> _,
|
||||
call_site,
|
||||
def_site,
|
||||
default_transparency,
|
||||
allow_internal_unstable,
|
||||
allow_internal_unsafe,
|
||||
local_inner_macros,
|
||||
|
|
|
@ -168,25 +168,21 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::AllocId {
|
|||
}
|
||||
}
|
||||
|
||||
// Allocations treat their relocations specially
|
||||
impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::Allocation {
|
||||
// `Relocations` with default type parameters is a sorted map.
|
||||
impl<'a, Tag> HashStable<StableHashingContext<'a>>
|
||||
for mir::interpret::Relocations<Tag>
|
||||
where
|
||||
Tag: HashStable<StableHashingContext<'a>>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(
|
||||
&self,
|
||||
hcx: &mut StableHashingContext<'a>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
) {
|
||||
let mir::interpret::Allocation {
|
||||
bytes, relocations, undef_mask, align, mutability,
|
||||
extra: _,
|
||||
} = self;
|
||||
bytes.hash_stable(hcx, hasher);
|
||||
relocations.len().hash_stable(hcx, hasher);
|
||||
for reloc in relocations.iter() {
|
||||
self.len().hash_stable(hcx, hasher);
|
||||
for reloc in self.iter() {
|
||||
reloc.hash_stable(hcx, hasher);
|
||||
}
|
||||
undef_mask.hash_stable(hcx, hasher);
|
||||
align.hash_stable(hcx, hasher);
|
||||
mutability.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1636,6 +1636,9 @@ impl<'tcx> ObligationCause<'tcx> {
|
|||
TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_generator() => {
|
||||
Error0644("closure/generator type that references itself")
|
||||
}
|
||||
TypeError::IntrinsicCast => {
|
||||
Error0308("cannot coerce intrinsics to function pointers")
|
||||
}
|
||||
_ => Error0308("mismatched types"),
|
||||
},
|
||||
}
|
||||
|
@ -1650,7 +1653,7 @@ impl<'tcx> ObligationCause<'tcx> {
|
|||
hir::MatchSource::IfLetDesugar { .. } => "`if let` arms have compatible types",
|
||||
_ => "match arms have compatible types",
|
||||
},
|
||||
IfExpression { .. } => "if and else have compatible types",
|
||||
IfExpression { .. } => "if and else have incompatible types",
|
||||
IfExpressionWithNoElse => "if missing an else returns ()",
|
||||
MainFunctionType => "`main` function has the correct type",
|
||||
StartFunctionType => "`start` function has the correct type",
|
||||
|
|
|
@ -78,12 +78,12 @@ impl<'a, 'tcx> Visitor<'tcx> for FindLocalByTypeVisitor<'a, 'tcx> {
|
|||
}
|
||||
|
||||
fn visit_body(&mut self, body: &'tcx Body) {
|
||||
for argument in &body.arguments {
|
||||
for param in &body.params {
|
||||
if let (None, Some(ty)) = (
|
||||
self.found_arg_pattern,
|
||||
self.node_matches_type(argument.hir_id),
|
||||
self.node_matches_type(param.hir_id),
|
||||
) {
|
||||
self.found_arg_pattern = Some(&*argument.pat);
|
||||
self.found_arg_pattern = Some(&*param.pat);
|
||||
self.found_ty = Some(ty);
|
||||
}
|
||||
}
|
||||
|
@ -150,12 +150,12 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
&self,
|
||||
ty: Ty<'tcx>,
|
||||
highlight: Option<ty::print::RegionHighlightMode>,
|
||||
) -> String {
|
||||
) -> (String, Option<Span>) {
|
||||
if let ty::Infer(ty::TyVar(ty_vid)) = ty.sty {
|
||||
let ty_vars = self.type_variables.borrow();
|
||||
if let TypeVariableOriginKind::TypeParameterDefinition(name) =
|
||||
ty_vars.var_origin(ty_vid).kind {
|
||||
return name.to_string();
|
||||
let var_origin = ty_vars.var_origin(ty_vid);
|
||||
if let TypeVariableOriginKind::TypeParameterDefinition(name) = var_origin.kind {
|
||||
return (name.to_string(), Some(var_origin.span));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -165,7 +165,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
printer.region_highlight_mode = highlight;
|
||||
}
|
||||
let _ = ty.print(printer);
|
||||
s
|
||||
(s, None)
|
||||
}
|
||||
|
||||
pub fn need_type_info_err(
|
||||
|
@ -175,7 +175,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
ty: Ty<'tcx>,
|
||||
) -> DiagnosticBuilder<'tcx> {
|
||||
let ty = self.resolve_vars_if_possible(&ty);
|
||||
let name = self.extract_type_name(&ty, None);
|
||||
let (name, name_sp) = self.extract_type_name(&ty, None);
|
||||
|
||||
let mut local_visitor = FindLocalByTypeVisitor::new(&self, ty, &self.tcx.hir());
|
||||
let ty_to_string = |ty: Ty<'tcx>| -> String {
|
||||
|
@ -200,6 +200,14 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
}
|
||||
let err_span = if let Some(pattern) = local_visitor.found_arg_pattern {
|
||||
pattern.span
|
||||
} else if let Some(span) = name_sp {
|
||||
// `span` here lets us point at `sum` instead of the entire right hand side expr:
|
||||
// error[E0282]: type annotations needed
|
||||
// --> file2.rs:3:15
|
||||
// |
|
||||
// 3 | let _ = x.sum() as f64;
|
||||
// | ^^^ cannot infer type for `S`
|
||||
span
|
||||
} else {
|
||||
span
|
||||
};
|
||||
|
@ -325,6 +333,23 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
};
|
||||
err.span_label(pattern.span, msg);
|
||||
}
|
||||
// Instead of the following:
|
||||
// error[E0282]: type annotations needed
|
||||
// --> file2.rs:3:15
|
||||
// |
|
||||
// 3 | let _ = x.sum() as f64;
|
||||
// | --^^^--------- cannot infer type for `S`
|
||||
// |
|
||||
// = note: type must be known at this point
|
||||
// We want:
|
||||
// error[E0282]: type annotations needed
|
||||
// --> file2.rs:3:15
|
||||
// |
|
||||
// 3 | let _ = x.sum() as f64;
|
||||
// | ^^^ cannot infer type for `S`
|
||||
// |
|
||||
// = note: type must be known at this point
|
||||
let span = name_sp.unwrap_or(span);
|
||||
if !err.span.span_labels().iter().any(|span_label| {
|
||||
span_label.label.is_some() && span_label.span == span
|
||||
}) && local_visitor.found_arg_pattern.is_none()
|
||||
|
@ -342,7 +367,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
ty: Ty<'tcx>,
|
||||
) -> DiagnosticBuilder<'tcx> {
|
||||
let ty = self.resolve_vars_if_possible(&ty);
|
||||
let name = self.extract_type_name(&ty, None);
|
||||
let name = self.extract_type_name(&ty, None).0;
|
||||
let mut err = struct_span_err!(
|
||||
self.tcx.sess, span, E0698, "type inside {} must be known in this context", kind,
|
||||
);
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
//! where both the regions are anonymous.
|
||||
|
||||
use crate::infer::error_reporting::nice_region_error::NiceRegionError;
|
||||
use crate::infer::error_reporting::nice_region_error::util::AnonymousArgInfo;
|
||||
use crate::infer::error_reporting::nice_region_error::util::AnonymousParamInfo;
|
||||
use crate::util::common::ErrorReported;
|
||||
|
||||
impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
||||
|
@ -59,13 +59,13 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
let ty_sub = self.find_anon_type(sub, &bregion_sub)?;
|
||||
|
||||
debug!(
|
||||
"try_report_anon_anon_conflict: found_arg1={:?} sup={:?} br1={:?}",
|
||||
"try_report_anon_anon_conflict: found_param1={:?} sup={:?} br1={:?}",
|
||||
ty_sub,
|
||||
sup,
|
||||
bregion_sup
|
||||
);
|
||||
debug!(
|
||||
"try_report_anon_anon_conflict: found_arg2={:?} sub={:?} br2={:?}",
|
||||
"try_report_anon_anon_conflict: found_param2={:?} sub={:?} br2={:?}",
|
||||
ty_sup,
|
||||
sub,
|
||||
bregion_sub
|
||||
|
@ -74,24 +74,24 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
let (ty_sup, ty_fndecl_sup) = ty_sup;
|
||||
let (ty_sub, ty_fndecl_sub) = ty_sub;
|
||||
|
||||
let AnonymousArgInfo {
|
||||
arg: anon_arg_sup, ..
|
||||
} = self.find_arg_with_region(sup, sup)?;
|
||||
let AnonymousArgInfo {
|
||||
arg: anon_arg_sub, ..
|
||||
} = self.find_arg_with_region(sub, sub)?;
|
||||
let AnonymousParamInfo {
|
||||
param: anon_param_sup, ..
|
||||
} = self.find_param_with_region(sup, sup)?;
|
||||
let AnonymousParamInfo {
|
||||
param: anon_param_sub, ..
|
||||
} = self.find_param_with_region(sub, sub)?;
|
||||
|
||||
let sup_is_ret_type =
|
||||
self.is_return_type_anon(scope_def_id_sup, bregion_sup, ty_fndecl_sup);
|
||||
let sub_is_ret_type =
|
||||
self.is_return_type_anon(scope_def_id_sub, bregion_sub, ty_fndecl_sub);
|
||||
|
||||
let span_label_var1 = match anon_arg_sup.pat.simple_ident() {
|
||||
let span_label_var1 = match anon_param_sup.pat.simple_ident() {
|
||||
Some(simple_ident) => format!(" from `{}`", simple_ident),
|
||||
None => String::new(),
|
||||
};
|
||||
|
||||
let span_label_var2 = match anon_arg_sub.pat.simple_ident() {
|
||||
let span_label_var2 = match anon_param_sub.pat.simple_ident() {
|
||||
Some(simple_ident) => format!(" into `{}`", simple_ident),
|
||||
None => String::new(),
|
||||
};
|
||||
|
|
|
@ -6,7 +6,7 @@ use crate::ty;
|
|||
use errors::{Applicability, DiagnosticBuilder};
|
||||
|
||||
impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
||||
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
|
||||
/// When given a `ConcreteFailure` for a function with parameters containing a named region and
|
||||
/// an anonymous region, emit a descriptive diagnostic error.
|
||||
pub(super) fn try_report_named_anon_conflict(&self) -> Option<DiagnosticBuilder<'a>> {
|
||||
let (span, sub, sup) = self.get_regions();
|
||||
|
@ -24,23 +24,23 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
// only introduced anonymous regions in parameters) as well as a
|
||||
// version new_ty of its type where the anonymous region is replaced
|
||||
// with the named one.
|
||||
let (named, anon, anon_arg_info, region_info) = if self.is_named_region(sub)
|
||||
let (named, anon, anon_param_info, region_info) = if self.is_named_region(sub)
|
||||
&& self.tcx().is_suitable_region(sup).is_some()
|
||||
&& self.find_arg_with_region(sup, sub).is_some()
|
||||
&& self.find_param_with_region(sup, sub).is_some()
|
||||
{
|
||||
(
|
||||
sub,
|
||||
sup,
|
||||
self.find_arg_with_region(sup, sub).unwrap(),
|
||||
self.find_param_with_region(sup, sub).unwrap(),
|
||||
self.tcx().is_suitable_region(sup).unwrap(),
|
||||
)
|
||||
} else if self.is_named_region(sup) && self.tcx().is_suitable_region(sub).is_some()
|
||||
&& self.find_arg_with_region(sub, sup).is_some()
|
||||
&& self.find_param_with_region(sub, sup).is_some()
|
||||
{
|
||||
(
|
||||
sup,
|
||||
sub,
|
||||
self.find_arg_with_region(sub, sup).unwrap(),
|
||||
self.find_param_with_region(sub, sup).unwrap(),
|
||||
self.tcx().is_suitable_region(sub).unwrap(),
|
||||
)
|
||||
} else {
|
||||
|
@ -49,20 +49,20 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
|
||||
debug!("try_report_named_anon_conflict: named = {:?}", named);
|
||||
debug!(
|
||||
"try_report_named_anon_conflict: anon_arg_info = {:?}",
|
||||
anon_arg_info
|
||||
"try_report_named_anon_conflict: anon_param_info = {:?}",
|
||||
anon_param_info
|
||||
);
|
||||
debug!(
|
||||
"try_report_named_anon_conflict: region_info = {:?}",
|
||||
region_info
|
||||
);
|
||||
|
||||
let (arg, new_ty, new_ty_span, br, is_first, scope_def_id, is_impl_item) = (
|
||||
anon_arg_info.arg,
|
||||
anon_arg_info.arg_ty,
|
||||
anon_arg_info.arg_ty_span,
|
||||
anon_arg_info.bound_region,
|
||||
anon_arg_info.is_first,
|
||||
let (param, new_ty, new_ty_span, br, is_first, scope_def_id, is_impl_item) = (
|
||||
anon_param_info.param,
|
||||
anon_param_info.param_ty,
|
||||
anon_param_info.param_ty_span,
|
||||
anon_param_info.bound_region,
|
||||
anon_param_info.is_first,
|
||||
region_info.def_id,
|
||||
region_info.is_impl_item,
|
||||
);
|
||||
|
@ -95,7 +95,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
let (error_var, span_label_var) = match arg.pat.simple_ident() {
|
||||
let (error_var, span_label_var) = match param.pat.simple_ident() {
|
||||
Some(simple_ident) => (
|
||||
format!("the type of `{}`", simple_ident),
|
||||
format!("the type of `{}`", simple_ident),
|
||||
|
|
|
@ -10,37 +10,37 @@ use syntax_pos::Span;
|
|||
// The struct contains the information about the anonymous region
|
||||
// we are searching for.
|
||||
#[derive(Debug)]
|
||||
pub(super) struct AnonymousArgInfo<'tcx> {
|
||||
// the argument corresponding to the anonymous region
|
||||
pub arg: &'tcx hir::Arg,
|
||||
// the type corresponding to the anonymous region argument
|
||||
pub arg_ty: Ty<'tcx>,
|
||||
pub(super) struct AnonymousParamInfo<'tcx> {
|
||||
// the parameter corresponding to the anonymous region
|
||||
pub param: &'tcx hir::Param,
|
||||
// the type corresponding to the anonymous region parameter
|
||||
pub param_ty: Ty<'tcx>,
|
||||
// the ty::BoundRegion corresponding to the anonymous region
|
||||
pub bound_region: ty::BoundRegion,
|
||||
// arg_ty_span contains span of argument type
|
||||
pub arg_ty_span : Span,
|
||||
// param_ty_span contains span of parameter type
|
||||
pub param_ty_span : Span,
|
||||
// whether this is the first parameter
|
||||
// in the function declaration
|
||||
pub is_first: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
||||
// This method walks the Type of the function body arguments using
|
||||
// This method walks the Type of the function body parameters using
|
||||
// `fold_regions()` function and returns the
|
||||
// &hir::Arg of the function argument corresponding to the anonymous
|
||||
// &hir::Param of the function parameter corresponding to the anonymous
|
||||
// region and the Ty corresponding to the named region.
|
||||
// Currently only the case where the function declaration consists of
|
||||
// one named region and one anonymous region is handled.
|
||||
// Consider the example `fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32`
|
||||
// Here, we would return the hir::Arg for y, we return the type &'a
|
||||
// Here, we would return the hir::Param for y, we return the type &'a
|
||||
// i32, which is the type of y but with the anonymous region replaced
|
||||
// with 'a, the corresponding bound region and is_first which is true if
|
||||
// the hir::Arg is the first argument in the function declaration.
|
||||
pub(super) fn find_arg_with_region(
|
||||
// the hir::Param is the first parameter in the function declaration.
|
||||
pub(super) fn find_param_with_region(
|
||||
&self,
|
||||
anon_region: Region<'tcx>,
|
||||
replace_region: Region<'tcx>,
|
||||
) -> Option<AnonymousArgInfo<'_>> {
|
||||
) -> Option<AnonymousParamInfo<'_>> {
|
||||
let (id, bound_region) = match *anon_region {
|
||||
ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region),
|
||||
ty::ReEarlyBound(ebr) => (
|
||||
|
@ -57,16 +57,16 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
let owner_id = hir.body_owner(body_id);
|
||||
let fn_decl = hir.fn_decl_by_hir_id(owner_id).unwrap();
|
||||
if let Some(tables) = self.tables {
|
||||
body.arguments
|
||||
body.params
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, arg)| {
|
||||
.filter_map(|(index, param)| {
|
||||
// May return None; sometimes the tables are not yet populated.
|
||||
let ty_hir_id = fn_decl.inputs[index].hir_id;
|
||||
let arg_ty_span = hir.span(ty_hir_id);
|
||||
let ty = tables.node_type_opt(arg.hir_id)?;
|
||||
let param_ty_span = hir.span(ty_hir_id);
|
||||
let ty = tables.node_type_opt(param.hir_id)?;
|
||||
let mut found_anon_region = false;
|
||||
let new_arg_ty = self.tcx().fold_regions(&ty, &mut false, |r, _| {
|
||||
let new_param_ty = self.tcx().fold_regions(&ty, &mut false, |r, _| {
|
||||
if *r == *anon_region {
|
||||
found_anon_region = true;
|
||||
replace_region
|
||||
|
@ -76,10 +76,10 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
|||
});
|
||||
if found_anon_region {
|
||||
let is_first = index == 0;
|
||||
Some(AnonymousArgInfo {
|
||||
arg: arg,
|
||||
arg_ty: new_arg_ty,
|
||||
arg_ty_span : arg_ty_span,
|
||||
Some(AnonymousParamInfo {
|
||||
param: param,
|
||||
param_ty: new_param_ty,
|
||||
param_ty_span : param_ty_span,
|
||||
bound_region: bound_region,
|
||||
is_first: is_first,
|
||||
})
|
||||
|
|
|
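The doc comments in this file use `fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32` as their running example. A small, self-contained sketch of the code this error path reports on, and of the fix, using illustrative names:

```rust
// The named region `'a` and the anonymous region on `y` conflict when the body
// returns `y`; the diagnostic explains that `y`'s type needs the named lifetime:
//
// fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 {
//     y // error: explicit lifetime required in the type of `y`
// }

// Giving the second parameter the named lifetime fixes it.
fn foo<'a>(x: &'a i32, y: &'a i32) -> &'a i32 {
    if *x > *y { x } else { y }
}

fn main() {
    println!("{}", foo(&1, &2));
}
```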
@ -1460,7 +1460,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
let copy_def_id = self.tcx.require_lang_item(lang_items::CopyTraitLangItem);
|
||||
let copy_def_id = self.tcx.require_lang_item(lang_items::CopyTraitLangItem, None);
|
||||
|
||||
// this can get called from typeck (by euv), and moves_by_default
|
||||
// rightly refuses to work with inference variables, but
|
||||
|
|
|
@ -62,6 +62,7 @@
|
|||
#![feature(log_syntax)]
|
||||
#![feature(mem_take)]
|
||||
#![feature(associated_type_bounds)]
|
||||
#![feature(rustc_attrs)]
|
||||
|
||||
#![recursion_limit="512"]
|
||||
|
||||
|
@ -109,6 +110,7 @@ pub mod middle {
|
|||
pub mod cstore;
|
||||
pub mod dead;
|
||||
pub mod dependency_format;
|
||||
pub mod diagnostic_items;
|
||||
pub mod entry;
|
||||
pub mod exported_symbols;
|
||||
pub mod free_region;
|
||||
|
|
|
@ -966,10 +966,10 @@ for LateContextAndPass<'a, 'tcx, T> {
|
|||
self.context.tables = old_tables;
|
||||
}
|
||||
|
||||
fn visit_arg(&mut self, arg: &'tcx hir::Arg) {
|
||||
self.with_lint_attrs(arg.hir_id, &arg.attrs, |cx| {
|
||||
lint_callback!(cx, check_arg, arg);
|
||||
hir_visit::walk_arg(cx, arg);
|
||||
fn visit_param(&mut self, param: &'tcx hir::Param) {
|
||||
self.with_lint_attrs(param.hir_id, &param.attrs, |cx| {
|
||||
lint_callback!(cx, check_param, param);
|
||||
hir_visit::walk_param(cx, param);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1040,13 +1040,13 @@ for LateContextAndPass<'a, 'tcx, T> {
|
|||
|
||||
fn visit_variant_data(&mut self,
|
||||
s: &'tcx hir::VariantData,
|
||||
name: ast::Name,
|
||||
g: &'tcx hir::Generics,
|
||||
item_id: hir::HirId,
|
||||
_: ast::Name,
|
||||
_: &'tcx hir::Generics,
|
||||
_: hir::HirId,
|
||||
_: Span) {
|
||||
lint_callback!(self, check_struct_def, s, name, g, item_id);
|
||||
lint_callback!(self, check_struct_def, s);
|
||||
hir_visit::walk_struct_def(self, s);
|
||||
lint_callback!(self, check_struct_def_post, s, name, g, item_id);
|
||||
lint_callback!(self, check_struct_def_post, s);
|
||||
}
|
||||
|
||||
fn visit_struct_field(&mut self, s: &'tcx hir::StructField) {
|
||||
|
@ -1061,9 +1061,9 @@ for LateContextAndPass<'a, 'tcx, T> {
|
|||
g: &'tcx hir::Generics,
|
||||
item_id: hir::HirId) {
|
||||
self.with_lint_attrs(v.id, &v.attrs, |cx| {
|
||||
lint_callback!(cx, check_variant, v, g);
|
||||
lint_callback!(cx, check_variant, v);
|
||||
hir_visit::walk_variant(cx, v, g, item_id);
|
||||
lint_callback!(cx, check_variant_post, v, g);
|
||||
lint_callback!(cx, check_variant_post, v);
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -1163,10 +1163,10 @@ for LateContextAndPass<'a, 'tcx, T> {
|
|||
}
|
||||
|
||||
impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> {
|
||||
fn visit_arg(&mut self, arg: &'a ast::Arg) {
|
||||
self.with_lint_attrs(arg.id, &arg.attrs, |cx| {
|
||||
run_early_pass!(cx, check_arg, arg);
|
||||
ast_visit::walk_arg(cx, arg);
|
||||
fn visit_param(&mut self, param: &'a ast::Param) {
|
||||
self.with_lint_attrs(param.id, &param.attrs, |cx| {
|
||||
run_early_pass!(cx, check_param, param);
|
||||
ast_visit::walk_param(cx, param);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1214,18 +1214,13 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
|
|||
run_early_pass!(self, check_fn_post, fk, decl, span, id);
|
||||
}
|
||||
|
||||
fn visit_variant_data(&mut self,
|
||||
s: &'a ast::VariantData,
|
||||
ident: ast::Ident,
|
||||
g: &'a ast::Generics,
|
||||
item_id: ast::NodeId,
|
||||
_: Span) {
|
||||
run_early_pass!(self, check_struct_def, s, ident, g, item_id);
|
||||
fn visit_variant_data(&mut self, s: &'a ast::VariantData) {
|
||||
run_early_pass!(self, check_struct_def, s);
|
||||
if let Some(ctor_hir_id) = s.ctor_id() {
|
||||
self.check_id(ctor_hir_id);
|
||||
}
|
||||
ast_visit::walk_struct_def(self, s);
|
||||
run_early_pass!(self, check_struct_def_post, s, ident, g, item_id);
|
||||
run_early_pass!(self, check_struct_def_post, s);
|
||||
}
|
||||
|
||||
fn visit_struct_field(&mut self, s: &'a ast::StructField) {
|
||||
|
@ -1235,11 +1230,11 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
|
|||
})
|
||||
}
|
||||
|
||||
fn visit_variant(&mut self, v: &'a ast::Variant, g: &'a ast::Generics, item_id: ast::NodeId) {
|
||||
self.with_lint_attrs(item_id, &v.attrs, |cx| {
|
||||
run_early_pass!(cx, check_variant, v, g);
|
||||
ast_visit::walk_variant(cx, v, g, item_id);
|
||||
run_early_pass!(cx, check_variant_post, v, g);
|
||||
fn visit_variant(&mut self, v: &'a ast::Variant) {
|
||||
self.with_lint_attrs(v.id, &v.attrs, |cx| {
|
||||
run_early_pass!(cx, check_variant, v);
|
||||
ast_visit::walk_variant(cx, v);
|
||||
run_early_pass!(cx, check_variant_post, v);
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -159,29 +159,23 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TyTyKind {
|
|||
}
|
||||
|
||||
fn lint_ty_kind_usage(cx: &LateContext<'_, '_>, segment: &PathSegment) -> bool {
|
||||
if segment.ident.name == sym::TyKind {
|
||||
if let Some(res) = segment.res {
|
||||
if let Some(did) = res.opt_def_id() {
|
||||
return cx.match_def_path(did, TYKIND_PATH);
|
||||
}
|
||||
if let Some(res) = segment.res {
|
||||
if let Some(did) = res.opt_def_id() {
|
||||
return cx.tcx.is_diagnostic_item(sym::TyKind, did);
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
const TYKIND_PATH: &[Symbol] = &[sym::rustc, sym::ty, sym::sty, sym::TyKind];
|
||||
const TY_PATH: &[Symbol] = &[sym::rustc, sym::ty, sym::Ty];
|
||||
const TYCTXT_PATH: &[Symbol] = &[sym::rustc, sym::ty, sym::context, sym::TyCtxt];
|
||||
|
||||
fn is_ty_or_ty_ctxt(cx: &LateContext<'_, '_>, ty: &Ty) -> Option<String> {
|
||||
match &ty.node {
|
||||
TyKind::Path(qpath) => {
|
||||
if let QPath::Resolved(_, path) = qpath {
|
||||
let did = path.res.opt_def_id()?;
|
||||
if cx.match_def_path(did, TY_PATH) {
|
||||
if cx.tcx.is_diagnostic_item(sym::Ty, did) {
|
||||
return Some(format!("Ty{}", gen_args(path.segments.last().unwrap())));
|
||||
} else if cx.match_def_path(did, TYCTXT_PATH) {
|
||||
} else if cx.tcx.is_diagnostic_item(sym::TyCtxt, did) {
|
||||
return Some(format!("TyCtxt{}", gen_args(path.segments.last().unwrap())));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -206,7 +206,7 @@ macro_rules! declare_lint_pass {
|
|||
macro_rules! late_lint_methods {
|
||||
($macro:path, $args:tt, [$hir:tt]) => (
|
||||
$macro!($args, [$hir], [
|
||||
fn check_arg(a: &$hir hir::Arg);
|
||||
fn check_param(a: &$hir hir::Param);
|
||||
fn check_body(a: &$hir hir::Body);
|
||||
fn check_body_post(a: &$hir hir::Body);
|
||||
fn check_name(a: Span, b: ast::Name);
|
||||
|
@ -248,21 +248,11 @@ macro_rules! late_lint_methods {
|
|||
fn check_trait_item_post(a: &$hir hir::TraitItem);
|
||||
fn check_impl_item(a: &$hir hir::ImplItem);
|
||||
fn check_impl_item_post(a: &$hir hir::ImplItem);
|
||||
fn check_struct_def(
|
||||
a: &$hir hir::VariantData,
|
||||
b: ast::Name,
|
||||
c: &$hir hir::Generics,
|
||||
d: hir::HirId
|
||||
);
|
||||
fn check_struct_def_post(
|
||||
a: &$hir hir::VariantData,
|
||||
b: ast::Name,
|
||||
c: &$hir hir::Generics,
|
||||
d: hir::HirId
|
||||
);
|
||||
fn check_struct_def(a: &$hir hir::VariantData);
|
||||
fn check_struct_def_post(a: &$hir hir::VariantData);
|
||||
fn check_struct_field(a: &$hir hir::StructField);
|
||||
fn check_variant(a: &$hir hir::Variant, b: &$hir hir::Generics);
|
||||
fn check_variant_post(a: &$hir hir::Variant, b: &$hir hir::Generics);
|
||||
fn check_variant(a: &$hir hir::Variant);
|
||||
fn check_variant_post(a: &$hir hir::Variant);
|
||||
fn check_lifetime(a: &$hir hir::Lifetime);
|
||||
fn check_path(a: &$hir hir::Path, b: hir::HirId);
|
||||
fn check_attribute(a: &$hir ast::Attribute);
|
||||
|
@ -359,7 +349,7 @@ macro_rules! declare_combined_late_lint_pass {
|
|||
macro_rules! early_lint_methods {
|
||||
($macro:path, $args:tt) => (
|
||||
$macro!($args, [
|
||||
fn check_arg(a: &ast::Arg);
|
||||
fn check_param(a: &ast::Param);
|
||||
fn check_ident(a: ast::Ident);
|
||||
fn check_crate(a: &ast::Crate);
|
||||
fn check_crate_post(a: &ast::Crate);
|
||||
|
@ -395,21 +385,11 @@ macro_rules! early_lint_methods {
|
|||
fn check_trait_item_post(a: &ast::TraitItem);
|
||||
fn check_impl_item(a: &ast::ImplItem);
|
||||
fn check_impl_item_post(a: &ast::ImplItem);
|
||||
fn check_struct_def(
|
||||
a: &ast::VariantData,
|
||||
b: ast::Ident,
|
||||
c: &ast::Generics,
|
||||
d: ast::NodeId
|
||||
);
|
||||
fn check_struct_def_post(
|
||||
a: &ast::VariantData,
|
||||
b: ast::Ident,
|
||||
c: &ast::Generics,
|
||||
d: ast::NodeId
|
||||
);
|
||||
fn check_struct_def(a: &ast::VariantData);
|
||||
fn check_struct_def_post(a: &ast::VariantData);
|
||||
fn check_struct_field(a: &ast::StructField);
|
||||
fn check_variant(a: &ast::Variant, b: &ast::Generics);
|
||||
fn check_variant_post(a: &ast::Variant, b: &ast::Generics);
|
||||
fn check_variant(a: &ast::Variant);
|
||||
fn check_variant_post(a: &ast::Variant);
|
||||
fn check_lifetime(a: &ast::Lifetime);
|
||||
fn check_path(a: &ast::Path, b: ast::NodeId);
|
||||
fn check_attribute(a: &ast::Attribute);
|
||||
|
@ -812,9 +792,9 @@ impl intravisit::Visitor<'tcx> for LintLevelMapBuilder<'tcx> {
|
|||
intravisit::NestedVisitorMap::All(&self.tcx.hir())
|
||||
}
|
||||
|
||||
fn visit_arg(&mut self, arg: &'tcx hir::Arg) {
|
||||
self.with_lint_attrs(arg.hir_id, &arg.attrs, |builder| {
|
||||
intravisit::walk_arg(builder, arg);
|
||||
fn visit_param(&mut self, param: &'tcx hir::Param) {
|
||||
self.with_lint_attrs(param.hir_id, &param.attrs, |builder| {
|
||||
intravisit::walk_param(builder, param);
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -30,10 +30,11 @@ fn should_explore(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
|
|||
Some(Node::Item(..)) |
|
||||
Some(Node::ImplItem(..)) |
|
||||
Some(Node::ForeignItem(..)) |
|
||||
Some(Node::TraitItem(..)) =>
|
||||
true,
|
||||
_ =>
|
||||
false
|
||||
Some(Node::TraitItem(..)) |
|
||||
Some(Node::Variant(..)) |
|
||||
Some(Node::AnonConst(..)) |
|
||||
Some(Node::Pat(..)) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -75,7 +76,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
|
|||
self.check_def_id(res.def_id());
|
||||
}
|
||||
_ if self.in_pat => {},
|
||||
Res::PrimTy(..) | Res::SelfTy(..) | Res::SelfCtor(..) |
|
||||
Res::PrimTy(..) | Res::SelfCtor(..) |
|
||||
Res::Local(..) => {}
|
||||
Res::Def(DefKind::Ctor(CtorOf::Variant, ..), ctor_def_id) => {
|
||||
let variant_id = self.tcx.parent(ctor_def_id).unwrap();
|
||||
|
@ -92,6 +93,14 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
|
|||
self.check_def_id(variant_id);
|
||||
}
|
||||
}
|
||||
Res::SelfTy(t, i) => {
|
||||
if let Some(t) = t {
|
||||
self.check_def_id(t);
|
||||
}
|
||||
if let Some(i) = i {
|
||||
self.check_def_id(i);
|
||||
}
|
||||
}
|
||||
Res::ToolMod | Res::NonMacroAttr(..) | Res::Err => {}
|
||||
_ => {
|
||||
self.check_def_id(res.def_id());
|
||||
|
@ -271,7 +280,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
|
|||
let res = self.tables.qpath_res(path, pat.hir_id);
|
||||
self.handle_field_pattern_match(pat, res, fields);
|
||||
}
|
||||
PatKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => {
|
||||
PatKind::Path(ref qpath) => {
|
||||
let res = self.tables.qpath_res(qpath, pat.hir_id);
|
||||
self.handle_res(res);
|
||||
}
|
||||
|
@ -298,6 +307,11 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
|
|||
}
|
||||
intravisit::walk_ty(self, ty);
|
||||
}
|
||||
|
||||
fn visit_anon_const(&mut self, c: &'tcx hir::AnonConst) {
|
||||
self.live_symbols.insert(c.hir_id);
|
||||
intravisit::walk_anon_const(self, c);
|
||||
}
|
||||
}
|
||||
|
||||
fn has_allow_dead_code_or_lang_attr(
|
||||
|
|
123
src/librustc/middle/diagnostic_items.rs
Normal file
|
@ -0,0 +1,123 @@
|
|||
//! Detecting diagnostic items.
|
||||
//!
|
||||
//! Diagnostic items are items that are not language-inherent, but can reasonably be expected to
|
||||
//! exist for diagnostic purposes. This allows diagnostic authors to refer to specific items
|
||||
//! directly, without having to guess module paths and crates.
|
||||
//! Examples are:
|
||||
//!
|
||||
//! * Traits like `Debug`, that have no bearing on language semantics
|
||||
//!
|
||||
//! * Compiler internal types like `Ty` and `TyCtxt`
|
||||
|
||||
use crate::hir::def_id::{DefId, LOCAL_CRATE};
|
||||
use crate::ty::TyCtxt;
|
||||
use crate::util::nodemap::FxHashMap;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::symbol::{Symbol, sym};
|
||||
use crate::hir::itemlikevisit::ItemLikeVisitor;
|
||||
use crate::hir;
|
||||
|
||||
struct DiagnosticItemCollector<'tcx> {
|
||||
// items from this crate
|
||||
items: FxHashMap<Symbol, DefId>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
}
|
||||
|
||||
impl<'v, 'tcx> ItemLikeVisitor<'v> for DiagnosticItemCollector<'tcx> {
|
||||
fn visit_item(&mut self, item: &hir::Item) {
|
||||
self.observe_item(&item.attrs, item.hir_id);
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, trait_item: &hir::TraitItem) {
|
||||
self.observe_item(&trait_item.attrs, trait_item.hir_id);
|
||||
}
|
||||
|
||||
fn visit_impl_item(&mut self, impl_item: &hir::ImplItem) {
|
||||
self.observe_item(&impl_item.attrs, impl_item.hir_id);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> DiagnosticItemCollector<'tcx> {
|
||||
fn new(tcx: TyCtxt<'tcx>) -> DiagnosticItemCollector<'tcx> {
|
||||
DiagnosticItemCollector {
|
||||
tcx,
|
||||
items: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn observe_item(&mut self, attrs: &[ast::Attribute], hir_id: hir::HirId) {
|
||||
if let Some(name) = extract(attrs) {
|
||||
let def_id = self.tcx.hir().local_def_id(hir_id);
|
||||
// insert into our table
|
||||
collect_item(self.tcx, &mut self.items, name, def_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_item(
|
||||
tcx: TyCtxt<'_>,
|
||||
items: &mut FxHashMap<Symbol, DefId>,
|
||||
name: Symbol,
|
||||
item_def_id: DefId,
|
||||
) {
|
||||
// Check for duplicates.
|
||||
if let Some(original_def_id) = items.insert(name, item_def_id) {
|
||||
if original_def_id != item_def_id {
|
||||
let mut err = match tcx.hir().span_if_local(item_def_id) {
|
||||
Some(span) => tcx.sess.struct_span_err(
|
||||
span,
|
||||
&format!("duplicate diagnostic item found: `{}`.", name)),
|
||||
None => tcx.sess.struct_err(&format!(
|
||||
"duplicate diagnostic item in crate `{}`: `{}`.",
|
||||
tcx.crate_name(item_def_id.krate),
|
||||
name)),
|
||||
};
|
||||
if let Some(span) = tcx.hir().span_if_local(original_def_id) {
|
||||
span_note!(&mut err, span, "first defined here.");
|
||||
} else {
|
||||
err.note(&format!("first defined in crate `{}`.",
|
||||
tcx.crate_name(original_def_id.krate)));
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the first `rustc_diagnostic_item = "$name"` out of a list of attributes.
|
||||
fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {
|
||||
attrs.iter().find_map(|attr| {
|
||||
if attr.check_name(sym::rustc_diagnostic_item) {
|
||||
attr.value_str()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Traverse and collect the diagnostic items in the current crate.
|
||||
pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> &'tcx FxHashMap<Symbol, DefId> {
|
||||
// Initialize the collector.
|
||||
let mut collector = DiagnosticItemCollector::new(tcx);
|
||||
|
||||
// Collect diagnostic items in this crate.
|
||||
tcx.hir().krate().visit_all_item_likes(&mut collector);
|
||||
|
||||
tcx.arena.alloc(collector.items)
|
||||
}
|
||||
|
||||
|
||||
/// Traverse and collect all the diagnostic items in all crates.
|
||||
pub fn collect_all<'tcx>(tcx: TyCtxt<'tcx>) -> &'tcx FxHashMap<Symbol, DefId> {
|
||||
// Initialize the collector.
|
||||
let mut collector = FxHashMap::default();
|
||||
|
||||
// Collect diagnostic items in other crates.
|
||||
for &cnum in tcx.crates().iter().chain(std::iter::once(&LOCAL_CRATE)) {
|
||||
for (&name, &def_id) in tcx.diagnostic_items(cnum).iter() {
|
||||
collect_item(tcx, &mut collector, name, def_id);
|
||||
}
|
||||
}
|
||||
|
||||
tcx.arena.alloc(collector)
|
||||
}
|
|
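As a standalone illustration of the duplicate check in `collect_item` above, here is a minimal sketch using `std::collections::HashMap` and plain strings in place of `FxHashMap`, `Symbol`, and `DefId`; all names are illustrative:

```rust
use std::collections::HashMap;

/// Register `item_id` under `name`, reporting a conflict if a different
/// item already claimed the same diagnostic name.
fn collect_item(items: &mut HashMap<String, u32>, name: &str, item_id: u32) {
    if let Some(original_id) = items.insert(name.to_string(), item_id) {
        if original_id != item_id {
            eprintln!(
                "duplicate diagnostic item found: `{}` (first defined as item {})",
                name, original_id
            );
        }
    }
}

fn main() {
    let mut items = HashMap::new();
    collect_item(&mut items, "TyCtxt", 1);
    collect_item(&mut items, "TyKind", 2);
    collect_item(&mut items, "TyCtxt", 3); // reports a duplicate
    println!("{} diagnostic items registered", items.len());
}
```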
@ -313,9 +313,9 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
|
|||
pub fn consume_body(&mut self, body: &hir::Body) {
|
||||
debug!("consume_body(body={:?})", body);
|
||||
|
||||
for arg in &body.arguments {
|
||||
let arg_ty = return_if_err!(self.mc.pat_ty_adjusted(&arg.pat));
|
||||
debug!("consume_body: arg_ty = {:?}", arg_ty);
|
||||
for param in &body.params {
|
||||
let param_ty = return_if_err!(self.mc.pat_ty_adjusted(&param.pat));
|
||||
debug!("consume_body: param_ty = {:?}", param_ty);
|
||||
|
||||
let fn_body_scope_r =
|
||||
self.tcx().mk_region(ty::ReScope(
|
||||
|
@ -323,13 +323,13 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
|
|||
id: body.value.hir_id.local_id,
|
||||
data: region::ScopeData::Node
|
||||
}));
|
||||
let arg_cmt = Rc::new(self.mc.cat_rvalue(
|
||||
arg.hir_id,
|
||||
arg.pat.span,
|
||||
fn_body_scope_r, // Args live only as long as the fn body.
|
||||
arg_ty));
|
||||
let param_cmt = Rc::new(self.mc.cat_rvalue(
|
||||
param.hir_id,
|
||||
param.pat.span,
|
||||
fn_body_scope_r, // Parameters live only as long as the fn body.
|
||||
param_ty));
|
||||
|
||||
self.walk_irrefutable_pat(arg_cmt, &arg.pat);
|
||||
self.walk_irrefutable_pat(param_cmt, &param.pat);
|
||||
}
|
||||
|
||||
self.consume_expr(&body.value);
|
||||
|
|
|
@ -367,8 +367,6 @@ language_item_table! {
|
|||
|
||||
MaybeUninitLangItem, "maybe_uninit", maybe_uninit, Target::Union;
|
||||
|
||||
DebugTraitLangItem, "debug_trait", debug_trait, Target::Trait;
|
||||
|
||||
// Align offset for stride != 1, must not panic.
|
||||
AlignOffsetLangItem, "align_offset", align_offset_fn, Target::Fn;
|
||||
|
||||
|
@ -381,9 +379,13 @@ language_item_table! {
|
|||
impl<'tcx> TyCtxt<'tcx> {
|
||||
/// Returns the `DefId` for a given `LangItem`.
|
||||
/// If not found, fatally abort compilation.
|
||||
pub fn require_lang_item(&self, lang_item: LangItem) -> DefId {
|
||||
pub fn require_lang_item(&self, lang_item: LangItem, span: Option<Span>) -> DefId {
|
||||
self.lang_items().require(lang_item).unwrap_or_else(|msg| {
|
||||
self.sess.fatal(&msg)
|
||||
if let Some(span) = span {
|
||||
self.sess.span_fatal(span, &msg)
|
||||
} else {
|
||||
self.sess.fatal(&msg)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -242,7 +242,7 @@ struct LocalInfo {
|
|||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum VarKind {
|
||||
Arg(HirId, ast::Name),
|
||||
Param(HirId, ast::Name),
|
||||
Local(LocalInfo),
|
||||
CleanExit
|
||||
}
|
||||
|
@ -298,7 +298,7 @@ impl IrMaps<'tcx> {
|
|||
self.num_vars += 1;
|
||||
|
||||
match vk {
|
||||
Local(LocalInfo { id: node_id, .. }) | Arg(node_id, _) => {
|
||||
Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) => {
|
||||
self.variable_map.insert(node_id, v);
|
||||
},
|
||||
CleanExit => {}
|
||||
|
@ -320,7 +320,7 @@ impl IrMaps<'tcx> {
|
|||
|
||||
fn variable_name(&self, var: Variable) -> String {
|
||||
match self.var_kinds[var.get()] {
|
||||
Local(LocalInfo { name, .. }) | Arg(_, name) => {
|
||||
Local(LocalInfo { name, .. }) | Param(_, name) => {
|
||||
name.to_string()
|
||||
},
|
||||
CleanExit => "<clean-exit>".to_owned()
|
||||
|
@ -330,7 +330,7 @@ impl IrMaps<'tcx> {
|
|||
fn variable_is_shorthand(&self, var: Variable) -> bool {
|
||||
match self.var_kinds[var.get()] {
|
||||
Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
|
||||
Arg(..) | CleanExit => false
|
||||
Param(..) | CleanExit => false
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -371,13 +371,13 @@ fn visit_fn<'tcx>(
|
|||
|
||||
let body = ir.tcx.hir().body(body_id);
|
||||
|
||||
for arg in &body.arguments {
|
||||
let is_shorthand = match arg.pat.node {
|
||||
for param in &body.params {
|
||||
let is_shorthand = match param.pat.node {
|
||||
crate::hir::PatKind::Struct(..) => true,
|
||||
_ => false,
|
||||
};
|
||||
arg.pat.each_binding(|_bm, hir_id, _x, ident| {
|
||||
debug!("adding argument {:?}", hir_id);
|
||||
param.pat.each_binding(|_bm, hir_id, _x, ident| {
|
||||
debug!("adding parameters {:?}", hir_id);
|
||||
let var = if is_shorthand {
|
||||
Local(LocalInfo {
|
||||
id: hir_id,
|
||||
|
@ -385,7 +385,7 @@ fn visit_fn<'tcx>(
|
|||
is_shorthand: true,
|
||||
})
|
||||
} else {
|
||||
Arg(hir_id, ident.name)
|
||||
Param(hir_id, ident.name)
|
||||
};
|
||||
fn_maps.add_variable(var);
|
||||
})
|
||||
|
@ -1525,8 +1525,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
|||
}
|
||||
|
||||
fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
|
||||
for arg in &body.arguments {
|
||||
arg.pat.each_binding(|_bm, hir_id, _, ident| {
|
||||
for param in &body.params {
|
||||
param.pat.each_binding(|_bm, hir_id, _, ident| {
|
||||
let sp = ident.span;
|
||||
let var = self.variable(hir_id, sp);
|
||||
// Ignore unused self.
|
||||
|
|
|
@ -1383,8 +1383,8 @@ impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
|
|||
|
||||
// The arguments and `self` are parented to the fn.
|
||||
self.cx.var_parent = self.cx.parent.take();
|
||||
for argument in &body.arguments {
|
||||
self.visit_pat(&argument.pat);
|
||||
for param in &body.params {
|
||||
self.visit_pat(&param.pat);
|
||||
}
|
||||
|
||||
// The body of the every fn is a root scope.
|
||||
|
|
|
@ -2557,7 +2557,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
|
|||
} = info;
|
||||
|
||||
let help_name = if let Some(ident) = parent.and_then(|body| {
|
||||
self.tcx.hir().body(body).arguments[index].pat.simple_ident()
|
||||
self.tcx.hir().body(body).params[index].pat.simple_ident()
|
||||
}) {
|
||||
format!("`{}`", ident)
|
||||
} else {
|
||||
|
|
|
@ -13,18 +13,33 @@ use rustc_data_structures::sorted_map::SortedMap;
|
|||
use rustc_target::abi::HasDataLayout;
|
||||
use std::borrow::Cow;
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
|
||||
// NOTE: When adding new fields, make sure to adjust the Snapshot impl in
|
||||
// `src/librustc_mir/interpret/snapshot.rs`.
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
Eq,
|
||||
PartialEq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
RustcEncodable,
|
||||
RustcDecodable,
|
||||
HashStable,
|
||||
)]
|
||||
pub struct Allocation<Tag=(),Extra=()> {
|
||||
/// The actual bytes of the allocation.
|
||||
/// Note that the bytes of a pointer represent the offset of the pointer
|
||||
pub bytes: Vec<u8>,
|
||||
/// Note that the bytes of a pointer represent the offset of the pointer.
|
||||
bytes: Vec<u8>,
|
||||
/// Maps from byte addresses to extra data for each pointer.
|
||||
/// Only the first byte of a pointer is inserted into the map; i.e.,
|
||||
/// every entry in this map applies to `pointer_size` consecutive bytes starting
|
||||
/// at the given offset.
|
||||
pub relocations: Relocations<Tag>,
|
||||
/// Denotes undefined memory. Reading from undefined memory is forbidden in miri
|
||||
pub undef_mask: UndefMask,
|
||||
relocations: Relocations<Tag>,
|
||||
/// Denotes which part of this allocation is initialized.
|
||||
undef_mask: UndefMask,
|
||||
/// The size of the allocation. Currently, must always equal `bytes.len()`.
|
||||
pub size: Size,
|
||||
/// The alignment of the allocation to detect unaligned reads.
|
||||
pub align: Align,
|
||||
/// Whether the allocation is mutable.
|
||||
|
@ -85,11 +100,12 @@ impl<Tag> Allocation<Tag> {
|
|||
/// Creates a read-only allocation initialized by the given bytes
|
||||
pub fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, align: Align) -> Self {
|
||||
let bytes = slice.into().into_owned();
|
||||
let undef_mask = UndefMask::new(Size::from_bytes(bytes.len() as u64), true);
|
||||
let size = Size::from_bytes(bytes.len() as u64);
|
||||
Self {
|
||||
bytes,
|
||||
relocations: Relocations::new(),
|
||||
undef_mask,
|
||||
undef_mask: UndefMask::new(size, true),
|
||||
size,
|
||||
align,
|
||||
mutability: Mutability::Immutable,
|
||||
extra: (),
|
||||
|
@ -106,6 +122,7 @@ impl<Tag> Allocation<Tag> {
|
|||
bytes: vec![0; size.bytes() as usize],
|
||||
relocations: Relocations::new(),
|
||||
undef_mask: UndefMask::new(size, false),
|
||||
size,
|
||||
align,
|
||||
mutability: Mutability::Mutable,
|
||||
extra: (),
|
||||
|
@ -113,6 +130,31 @@ impl<Tag> Allocation<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Raw accessors. Provide access to otherwise private bytes.
|
||||
impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
pub fn len(&self) -> usize {
|
||||
self.size.bytes() as usize
|
||||
}
|
||||
|
||||
/// Looks at a slice which may describe undefined bytes or describe a relocation. This differs
|
||||
/// from `get_bytes_with_undef_and_ptr` in that it does no relocation checks (even on the
|
||||
/// edges) at all. It further ignores `AllocationExtra` callbacks.
|
||||
/// This must not be used for reads affecting the interpreter execution.
|
||||
pub fn inspect_with_undef_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
|
||||
&self.bytes[range]
|
||||
}
|
||||
|
||||
/// Returns the undef mask.
|
||||
pub fn undef_mask(&self) -> &UndefMask {
|
||||
&self.undef_mask
|
||||
}
|
||||
|
||||
/// Returns the relocation list.
|
||||
pub fn relocations(&self) -> &Relocations<Tag> {
|
||||
&self.relocations
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx Allocation {}
|
||||
|
||||
/// Byte accessors
|
||||
|
@ -132,9 +174,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
);
|
||||
let end = end.bytes() as usize;
|
||||
assert!(
|
||||
end <= self.bytes.len(),
|
||||
end <= self.len(),
|
||||
"Out-of-bounds access at offset {}, size {} in allocation of size {}",
|
||||
offset.bytes(), size.bytes(), self.bytes.len()
|
||||
offset.bytes(), size.bytes(), self.len()
|
||||
);
|
||||
(offset.bytes() as usize)..end
|
||||
}
|
||||
|
@ -422,7 +464,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// Relocations
|
||||
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
/// Returns all relocations overlapping with the given ptr-offset pair.
|
||||
pub fn relocations(
|
||||
pub fn get_relocations(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
ptr: Pointer<Tag>,
|
||||
|
@ -443,7 +485,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
ptr: Pointer<Tag>,
|
||||
size: Size,
|
||||
) -> InterpResult<'tcx> {
|
||||
if self.relocations(cx, ptr, size).is_empty() {
|
||||
if self.get_relocations(cx, ptr, size).is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
throw_unsup!(ReadPointerAsBytes)
|
||||
|
@ -465,7 +507,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
// Find the start and end of the given range and its outermost relocations.
|
||||
let (first, last) = {
|
||||
// Find all relocations overlapping the given range.
|
||||
let relocations = self.relocations(cx, ptr, size);
|
||||
let relocations = self.get_relocations(cx, ptr, size);
|
||||
if relocations.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -536,6 +578,94 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Run-length encoding of the undef mask.
|
||||
/// Used to copy parts of a mask multiple times to another allocation.
|
||||
pub struct AllocationDefinedness {
|
||||
/// The definedness of the first range.
|
||||
initial: bool,
|
||||
/// The lengths of ranges that are run-length encoded.
|
||||
/// The definedness of the ranges alternate starting with `initial`.
|
||||
ranges: smallvec::SmallVec::<[u64; 1]>,
|
||||
}
|
||||
|
||||
/// Transferring the definedness mask to other allocations.
|
||||
impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
/// Creates a run-length encoding of the undef_mask.
|
||||
pub fn compress_undef_range(
|
||||
&self,
|
||||
src: Pointer<Tag>,
|
||||
size: Size,
|
||||
) -> AllocationDefinedness {
|
||||
// Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
|
||||
// a naive undef mask copying algorithm would repeatedly have to read the undef mask from
|
||||
// the source and write it to the destination. Even if we optimized the memory accesses,
|
||||
// we'd be doing all of this `repeat` times.
|
||||
// Therefore we precompute a compressed version of the undef mask of the source value and
|
||||
// then write it back `repeat` times without computing any more information from the source.
|
||||
|
||||
// a precomputed cache for ranges of defined/undefined bits
|
||||
// 0000010010001110 will become
|
||||
// [5, 1, 2, 1, 3, 3, 1]
|
||||
// where each element toggles the state
|
||||
|
||||
let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
|
||||
let initial = self.undef_mask.get(src.offset);
|
||||
let mut cur_len = 1;
|
||||
let mut cur = initial;
|
||||
|
||||
for i in 1..size.bytes() {
|
||||
// FIXME: optimize to bitshift the current undef block's bits and read the top bit
|
||||
if self.undef_mask.get(src.offset + Size::from_bytes(i)) == cur {
|
||||
cur_len += 1;
|
||||
} else {
|
||||
ranges.push(cur_len);
|
||||
cur_len = 1;
|
||||
cur = !cur;
|
||||
}
|
||||
}
|
||||
|
||||
ranges.push(cur_len);
|
||||
|
||||
AllocationDefinedness { ranges, initial, }
|
||||
}
|
||||
|
||||
/// Apply multiple instances of the run-length encoding to the undef_mask.
|
||||
pub fn mark_compressed_undef_range(
|
||||
&mut self,
|
||||
defined: &AllocationDefinedness,
|
||||
dest: Pointer<Tag>,
|
||||
size: Size,
|
||||
repeat: u64,
|
||||
) {
|
||||
// an optimization where we can just overwrite an entire range of definedness bits if
|
||||
// they are going to be uniformly `1` or `0`.
|
||||
if defined.ranges.len() <= 1 {
|
||||
self.undef_mask.set_range_inbounds(
|
||||
dest.offset,
|
||||
dest.offset + size * repeat,
|
||||
defined.initial,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
for mut j in 0..repeat {
|
||||
j *= size.bytes();
|
||||
j += dest.offset.bytes();
|
||||
let mut cur = defined.initial;
|
||||
for range in &defined.ranges {
|
||||
let old_j = j;
|
||||
j += range;
|
||||
self.undef_mask.set_range_inbounds(
|
||||
Size::from_bytes(old_j),
|
||||
Size::from_bytes(j),
|
||||
cur,
|
||||
);
|
||||
cur = !cur;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
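The comments above describe the run-length encoding with the example mask `0000010010001110` becoming `[5, 1, 2, 1, 3, 3, 1]`. A self-contained sketch of that encoding over a plain `Vec<bool>`, not the real `UndefMask` type:

```rust
/// Run-length encode a boolean mask: return the first value plus the run
/// lengths, where each successive run toggles the value.
fn compress(mask: &[bool]) -> (bool, Vec<u64>) {
    let initial = mask[0];
    let mut ranges = Vec::new();
    let mut cur = initial;
    let mut cur_len = 1u64;
    for &bit in &mask[1..] {
        if bit == cur {
            cur_len += 1;
        } else {
            ranges.push(cur_len);
            cur_len = 1;
            cur = !cur;
        }
    }
    ranges.push(cur_len);
    (initial, ranges)
}

fn main() {
    // 0000010010001110 from the comment above.
    let mask: Vec<bool> = "0000010010001110".chars().map(|c| c == '1').collect();
    let (initial, ranges) = compress(&mask);
    assert!(!initial);
    assert_eq!(ranges, vec![5, 1, 2, 1, 3, 3, 1]);
    println!("initial = {}, ranges = {:?}", initial, ranges);
}
```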
/// Relocations
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
|
||||
|
@ -566,6 +696,59 @@ impl<Tag> DerefMut for Relocations<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
/// A partial, owned list of relocations to transfer into another allocation.
|
||||
pub struct AllocationRelocations<Tag> {
|
||||
relative_relocations: Vec<(Size, (Tag, AllocId))>,
|
||||
}
|
||||
|
||||
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
pub fn prepare_relocation_copy(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
src: Pointer<Tag>,
|
||||
size: Size,
|
||||
dest: Pointer<Tag>,
|
||||
length: u64,
|
||||
) -> AllocationRelocations<Tag> {
|
||||
let relocations = self.get_relocations(cx, src, size);
|
||||
if relocations.is_empty() {
|
||||
return AllocationRelocations { relative_relocations: Vec::new() };
|
||||
}
|
||||
|
||||
let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
|
||||
|
||||
for i in 0..length {
|
||||
new_relocations.extend(
|
||||
relocations
|
||||
.iter()
|
||||
.map(|&(offset, reloc)| {
|
||||
// compute offset for current repetition
|
||||
let dest_offset = dest.offset + (i * size);
|
||||
(
|
||||
// shift offsets from source allocation to destination allocation
|
||||
offset + dest_offset - src.offset,
|
||||
reloc,
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
AllocationRelocations {
|
||||
relative_relocations: new_relocations,
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply a relocation copy.
|
||||
/// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
|
||||
/// to be clear of relocations.
|
||||
pub fn mark_relocation_range(
|
||||
&mut self,
|
||||
relocations: AllocationRelocations<Tag>,
|
||||
) {
|
||||
self.relocations.insert_presorted(relocations.relative_relocations);
|
||||
}
|
||||
}
|
||||
|
||||
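The offset arithmetic in `prepare_relocation_copy` can be sketched with plain integers: a relocation at `offset` in the source lands at `offset - src + (dest + i * size)` in repetition `i` of the destination. A minimal, self-contained version with illustrative values:

```rust
/// Shift relocation offsets from a source range into `length` back-to-back
/// copies starting at `dest`, mirroring the loop in `prepare_relocation_copy`.
fn shift_relocations(relocations: &[u64], src: u64, size: u64, dest: u64, length: u64) -> Vec<u64> {
    let mut out = Vec::with_capacity(relocations.len() * length as usize);
    for i in 0..length {
        // Offset of the current repetition in the destination allocation.
        let dest_offset = dest + i * size;
        out.extend(relocations.iter().map(|&offset| offset - src + dest_offset));
    }
    out
}

fn main() {
    // Two relocations inside a 16-byte source range starting at offset 4,
    // copied three times to offset 32.
    let shifted = shift_relocations(&[4, 12], 4, 16, 32, 3);
    assert_eq!(shifted, vec![32, 40, 48, 56, 64, 72]);
    println!("{:?}", shifted);
}
```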
////////////////////////////////////////////////////////////////////////////////
|
||||
// Undefined byte tracking
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -108,11 +108,6 @@ pub struct Body<'tcx> {
|
|||
/// needn't) be tracked across crates.
|
||||
pub source_scope_local_data: ClearCrossCrate<IndexVec<SourceScope, SourceScopeLocalData>>,
|
||||
|
||||
/// Rvalues promoted from this function, such as borrows of constants.
|
||||
/// Each of them is the Body of a constant with the fn's type parameters
|
||||
/// in scope, but a separate set of locals.
|
||||
pub promoted: IndexVec<Promoted, Body<'tcx>>,
|
||||
|
||||
/// Yields type of the function, if it is a generator.
|
||||
pub yield_ty: Option<Ty<'tcx>>,
|
||||
|
||||
|
@ -174,7 +169,6 @@ impl<'tcx> Body<'tcx> {
|
|||
basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
|
||||
source_scopes: IndexVec<SourceScope, SourceScopeData>,
|
||||
source_scope_local_data: ClearCrossCrate<IndexVec<SourceScope, SourceScopeLocalData>>,
|
||||
promoted: IndexVec<Promoted, Body<'tcx>>,
|
||||
yield_ty: Option<Ty<'tcx>>,
|
||||
local_decls: LocalDecls<'tcx>,
|
||||
user_type_annotations: CanonicalUserTypeAnnotations<'tcx>,
|
||||
|
@ -196,7 +190,6 @@ impl<'tcx> Body<'tcx> {
|
|||
basic_blocks,
|
||||
source_scopes,
|
||||
source_scope_local_data,
|
||||
promoted,
|
||||
yield_ty,
|
||||
generator_drop: None,
|
||||
generator_layout: None,
|
||||
|
@ -418,7 +411,6 @@ impl_stable_hash_for!(struct Body<'tcx> {
|
|||
basic_blocks,
|
||||
source_scopes,
|
||||
source_scope_local_data,
|
||||
promoted,
|
||||
yield_ty,
|
||||
generator_drop,
|
||||
generator_layout,
|
||||
|
@ -1555,7 +1547,7 @@ pub struct Statement<'tcx> {
|
|||
#[cfg(target_arch = "x86_64")]
|
||||
static_assert_size!(Statement<'_>, 56);
|
||||
|
||||
impl<'tcx> Statement<'tcx> {
|
||||
impl Statement<'_> {
|
||||
/// Changes a statement to a nop. This is both faster than deleting instructions and avoids
|
||||
/// invalidating statement indices in `Location`s.
|
||||
pub fn make_nop(&mut self) {
|
||||
|
@ -1677,7 +1669,7 @@ pub struct InlineAsm<'tcx> {
|
|||
pub inputs: Box<[(Span, Operand<'tcx>)]>,
|
||||
}
|
||||
|
||||
impl<'tcx> Debug for Statement<'tcx> {
|
||||
impl Debug for Statement<'_> {
|
||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
||||
use self::StatementKind::*;
|
||||
match self.kind {
|
||||
|
@ -1737,23 +1729,32 @@ pub enum PlaceBase<'tcx> {
|
|||
}
|
||||
|
||||
/// We store the normalized type to avoid requiring normalization when reading MIR
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct Static<'tcx> {
|
||||
pub ty: Ty<'tcx>,
|
||||
pub kind: StaticKind,
|
||||
pub kind: StaticKind<'tcx>,
|
||||
/// The `DefId` of the item this static was declared in. For promoted values, usually, this is
|
||||
/// the same as the `DefId` of the `mir::Body` containing the `Place` this promoted appears in.
|
||||
/// However, after inlining, that might no longer be the case as inlined `Place`s are copied
|
||||
/// into the calling frame.
|
||||
pub def_id: DefId,
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable, RustcEncodable, RustcDecodable,
|
||||
Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable, RustcEncodable, RustcDecodable,
|
||||
)]
|
||||
pub enum StaticKind {
|
||||
Promoted(Promoted),
|
||||
Static(DefId),
|
||||
pub enum StaticKind<'tcx> {
|
||||
/// Promoted references consist of an id (`Promoted`) and the substs necessary to monomorphize
|
||||
/// it. Usually, these substs are just the identity substs for the item. However, the inliner
|
||||
/// will adjust these substs when it inlines a function based on the substs at the callsite.
|
||||
Promoted(Promoted, SubstsRef<'tcx>),
|
||||
Static,
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct Static<'tcx> {
|
||||
ty,
|
||||
kind
|
||||
kind,
|
||||
def_id
|
||||
});
|
||||
|
||||
/// The `Projection` data structure defines things of the form `base.x`, `*b` or `b[index]`.
|
||||
|
@ -2047,7 +2048,7 @@ impl<'p, 'tcx> Iterator for ProjectionsIter<'p, 'tcx> {
|
|||
|
||||
impl<'p, 'tcx> FusedIterator for ProjectionsIter<'p, 'tcx> {}
|
||||
|
||||
impl<'tcx> Debug for Place<'tcx> {
|
||||
impl Debug for Place<'_> {
|
||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
||||
self.iterate(|_place_base, place_projections| {
|
||||
// FIXME: remove this collect once we have migrated to slices
|
||||
|
@ -2114,10 +2115,12 @@ impl Debug for PlaceBase<'_> {
|
|||
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
||||
match *self {
|
||||
PlaceBase::Local(id) => write!(fmt, "{:?}", id),
|
||||
PlaceBase::Static(box self::Static { ty, kind: StaticKind::Static(def_id) }) => {
|
||||
PlaceBase::Static(box self::Static { ty, kind: StaticKind::Static, def_id }) => {
|
||||
write!(fmt, "({}: {:?})", ty::tls::with(|tcx| tcx.def_path_str(def_id)), ty)
|
||||
}
|
||||
PlaceBase::Static(box self::Static { ty, kind: StaticKind::Promoted(promoted) }) => {
|
||||
PlaceBase::Static(box self::Static {
|
||||
ty, kind: StaticKind::Promoted(promoted, _), def_id: _
|
||||
}) => {
|
||||
write!(fmt, "({:?}: {:?})", promoted, ty)
|
||||
}
|
||||
}
|
||||
|
@ -3032,7 +3035,6 @@ BraceStructTypeFoldableImpl! {
|
|||
basic_blocks,
|
||||
source_scopes,
|
||||
source_scope_local_data,
|
||||
promoted,
|
||||
yield_ty,
|
||||
generator_drop,
|
||||
generator_layout,
|
||||
|
@ -3226,13 +3228,63 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> {
|
|||
impl<'tcx> TypeFoldable<'tcx> for Place<'tcx> {
|
||||
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
|
||||
Place {
|
||||
base: self.base.clone(),
|
||||
base: self.base.fold_with(folder),
|
||||
projection: self.projection.fold_with(folder),
|
||||
}
|
||||
}
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
self.projection.visit_with(visitor)
|
||||
self.base.visit_with(visitor) || self.projection.visit_with(visitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFoldable<'tcx> for PlaceBase<'tcx> {
|
||||
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
|
||||
match self {
|
||||
PlaceBase::Local(local) => PlaceBase::Local(local.fold_with(folder)),
|
||||
PlaceBase::Static(static_) => PlaceBase::Static(static_.fold_with(folder)),
|
||||
}
|
||||
}
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
match self {
|
||||
PlaceBase::Local(local) => local.visit_with(visitor),
|
||||
PlaceBase::Static(static_) => (**static_).visit_with(visitor),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFoldable<'tcx> for Static<'tcx> {
|
||||
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
|
||||
Static {
|
||||
ty: self.ty.fold_with(folder),
|
||||
kind: self.kind.fold_with(folder),
|
||||
def_id: self.def_id,
|
||||
}
|
||||
}
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
let Static { ty, kind, def_id: _ } = self;
|
||||
|
||||
ty.visit_with(visitor) || kind.visit_with(visitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFoldable<'tcx> for StaticKind<'tcx> {
|
||||
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
|
||||
match self {
|
||||
StaticKind::Promoted(promoted, substs) =>
|
||||
StaticKind::Promoted(promoted.fold_with(folder), substs.fold_with(folder)),
|
||||
StaticKind::Static => StaticKind::Static
|
||||
}
|
||||
}
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
match self {
|
||||
StaticKind::Promoted(promoted, substs) =>
|
||||
promoted.visit_with(visitor) || substs.visit_with(visitor),
|
||||
StaticKind::Static => { false }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -708,7 +708,7 @@ macro_rules! make_mir_visitor {
|
|||
PlaceBase::Local(local) => {
|
||||
self.visit_local(local, context, location);
|
||||
}
|
||||
PlaceBase::Static(box Static { kind: _, ty }) => {
|
||||
PlaceBase::Static(box Static { kind: _, ty, def_id: _ }) => {
|
||||
self.visit_ty(& $($mutability)? *ty, TyContext::Location(location));
|
||||
}
|
||||
}
|
||||
|
@ -724,10 +724,6 @@ macro_rules! make_mir_visitor {
|
|||
}
|
||||
|
||||
match & $($mutability)? proj.elem {
|
||||
ProjectionElem::Deref => {
|
||||
}
|
||||
ProjectionElem::Subslice { from: _, to: _ } => {
|
||||
}
|
||||
ProjectionElem::Field(_field, ty) => {
|
||||
self.visit_ty(ty, TyContext::Location(location));
|
||||
}
|
||||
|
@ -738,11 +734,12 @@ macro_rules! make_mir_visitor {
|
|||
location
|
||||
);
|
||||
}
|
||||
ProjectionElem::Deref |
|
||||
ProjectionElem::Subslice { from: _, to: _ } |
|
||||
ProjectionElem::ConstantIndex { offset: _,
|
||||
min_length: _,
|
||||
from_end: _ } => {
|
||||
}
|
||||
ProjectionElem::Downcast(_name, _variant_index) => {
|
||||
from_end: _ } |
|
||||
ProjectionElem::Downcast(_, _) => {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -110,7 +110,11 @@ rustc_queries! {
|
|||
no_hash
|
||||
}
|
||||
|
||||
query mir_validated(_: DefId) -> &'tcx Steal<mir::Body<'tcx>> {
|
||||
query mir_validated(_: DefId) ->
|
||||
(
|
||||
&'tcx Steal<mir::Body<'tcx>>,
|
||||
&'tcx Steal<IndexVec<mir::Promoted, mir::Body<'tcx>>>
|
||||
) {
|
||||
no_hash
|
||||
}
|
||||
|
||||
|
@ -125,7 +129,17 @@ rustc_queries! {
|
|||
}
|
||||
}
|
||||
|
||||
query promoted_mir(key: DefId) -> &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>> { }
|
||||
query promoted_mir(key: DefId) -> &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>> {
|
||||
cache_on_disk_if { key.is_local() }
|
||||
load_cached(tcx, id) {
|
||||
let promoted: Option<
|
||||
rustc_data_structures::indexed_vec::IndexVec<
|
||||
crate::mir::Promoted,
|
||||
crate::mir::Body<'tcx>
|
||||
>> = tcx.queries.on_disk_cache.try_load_query_result(tcx, id);
|
||||
promoted.map(|p| &*tcx.arena.alloc(p))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TypeChecking {
|
||||
|
@ -790,7 +804,7 @@ rustc_queries! {
|
|||
}
|
||||
|
||||
BorrowChecking {
|
||||
// Lifetime resolution. See `middle::resolve_lifetimes`.
|
||||
/// Lifetime resolution. See `middle::resolve_lifetimes`.
|
||||
query resolve_lifetimes(_: CrateNum) -> &'tcx ResolveLifetimes {
|
||||
desc { "resolving lifetimes" }
|
||||
}
|
||||
|
@ -832,13 +846,30 @@ rustc_queries! {
|
|||
-> &'tcx [(Symbol, Option<Symbol>)] {
|
||||
desc { "calculating the lib features defined in a crate" }
|
||||
}
|
||||
/// Returns the lang items defined in another crate by loading it from metadata.
|
||||
// FIXME: It is illegal to pass a `CrateNum` other than `LOCAL_CRATE` here, just get rid
|
||||
// of that argument?
|
||||
query get_lang_items(_: CrateNum) -> &'tcx LanguageItems {
|
||||
eval_always
|
||||
desc { "calculating the lang items map" }
|
||||
}
|
||||
|
||||
/// Returns all diagnostic items defined in all crates
|
||||
query all_diagnostic_items(_: CrateNum) -> &'tcx FxHashMap<Symbol, DefId> {
|
||||
eval_always
|
||||
desc { "calculating the diagnostic items map" }
|
||||
}
|
||||
|
||||
/// Returns the lang items defined in another crate by loading it from metadata.
|
||||
query defined_lang_items(_: CrateNum) -> &'tcx [(DefId, usize)] {
|
||||
desc { "calculating the lang items defined in a crate" }
|
||||
}
|
||||
|
||||
/// Returns the diagnostic items defined in a crate
|
||||
query diagnostic_items(_: CrateNum) -> &'tcx FxHashMap<Symbol, DefId> {
|
||||
desc { "calculating the diagnostic items map in a crate" }
|
||||
}
|
||||
|
||||
query missing_lang_items(_: CrateNum) -> &'tcx [LangItem] {
|
||||
desc { "calculating the missing lang items in a crate" }
|
||||
}
|
||||
|
|
|
@ -1292,6 +1292,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
|
|||
"show macro backtraces even for non-local macros"),
|
||||
teach: bool = (false, parse_bool, [TRACKED],
|
||||
"show extended diagnostic help"),
|
||||
terminal_width: Option<usize> = (None, parse_opt_uint, [UNTRACKED],
|
||||
"set the current terminal width"),
|
||||
continue_parse_after_error: bool = (false, parse_bool, [TRACKED],
|
||||
"attempt to recover from parse errors (experimental)"),
|
||||
dep_tasks: bool = (false, parse_bool, [UNTRACKED],
|
||||
|
@ -1719,13 +1721,7 @@ pub fn rustc_short_optgroups() -> Vec<RustcOptGroup> {
|
|||
static, framework, or dylib (the default).",
|
||||
"[KIND=]NAME",
|
||||
),
|
||||
opt::multi_s(
|
||||
"",
|
||||
"crate-type",
|
||||
"Comma separated list of types of crates
|
||||
for the compiler to emit",
|
||||
"[bin|lib|rlib|dylib|cdylib|staticlib|proc-macro]",
|
||||
),
|
||||
make_crate_type_option(),
|
||||
opt::opt_s(
|
||||
"",
|
||||
"crate-name",
|
||||
|
@ -2506,6 +2502,16 @@ pub fn build_session_options_and_crate_config(
|
|||
)
|
||||
}
|
||||
|
||||
pub fn make_crate_type_option() -> RustcOptGroup {
|
||||
opt::multi_s(
|
||||
"",
|
||||
"crate-type",
|
||||
"Comma separated list of types of crates
|
||||
for the compiler to emit",
|
||||
"[bin|lib|rlib|dylib|cdylib|staticlib|proc-macro]",
|
||||
)
|
||||
}
|
||||
|
||||
pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateType>, String> {
|
||||
let mut crate_types: Vec<CrateType> = Vec::new();
|
||||
for unparsed_crate_type in &list_list {
|
||||
|
|
|
@ -1055,6 +1055,7 @@ fn default_emitter(
|
|||
Some(source_map.clone()),
|
||||
short,
|
||||
sopts.debugging_opts.teach,
|
||||
sopts.debugging_opts.terminal_width,
|
||||
),
|
||||
Some(dst) => EmitterWriter::new(
|
||||
dst,
|
||||
|
@ -1062,6 +1063,7 @@ fn default_emitter(
|
|||
short,
|
||||
false, // no teach messages when writing to a buffer
|
||||
false, // no colors when writing to a buffer
|
||||
None, // no terminal width
|
||||
),
|
||||
};
|
||||
Box::new(emitter.ui_testing(sopts.debugging_opts.ui_testing))
|
||||
|
@ -1375,7 +1377,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
|
|||
let emitter: Box<dyn Emitter + sync::Send> = match output {
|
||||
config::ErrorOutputType::HumanReadable(kind) => {
|
||||
let (short, color_config) = kind.unzip();
|
||||
Box::new(EmitterWriter::stderr(color_config, None, short, false))
|
||||
Box::new(EmitterWriter::stderr(color_config, None, short, false, None))
|
||||
}
|
||||
config::ErrorOutputType::Json { pretty, json_rendered } =>
|
||||
Box::new(JsonEmitter::basic(pretty, json_rendered)),
|
||||
|
@ -1389,7 +1391,7 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
|
|||
let emitter: Box<dyn Emitter + sync::Send> = match output {
|
||||
config::ErrorOutputType::HumanReadable(kind) => {
|
||||
let (short, color_config) = kind.unzip();
|
||||
Box::new(EmitterWriter::stderr(color_config, None, short, false))
|
||||
Box::new(EmitterWriter::stderr(color_config, None, short, false, None))
|
||||
}
|
||||
config::ErrorOutputType::Json { pretty, json_rendered } =>
|
||||
Box::new(JsonEmitter::basic(pretty, json_rendered)),
|
||||
|
|
|
@ -1,20 +1,21 @@
|
|||
use super::{
|
||||
ConstEvalFailure,
|
||||
EvaluationResult,
|
||||
FulfillmentError,
|
||||
FulfillmentErrorCode,
|
||||
MismatchedProjectionTypes,
|
||||
ObjectSafetyViolation,
|
||||
Obligation,
|
||||
ObligationCause,
|
||||
ObligationCauseCode,
|
||||
OnUnimplementedDirective,
|
||||
OnUnimplementedNote,
|
||||
OutputTypeParameterMismatch,
|
||||
TraitNotObjectSafe,
|
||||
ConstEvalFailure,
|
||||
Overflow,
|
||||
PredicateObligation,
|
||||
SelectionContext,
|
||||
SelectionError,
|
||||
ObjectSafetyViolation,
|
||||
Overflow,
|
||||
TraitNotObjectSafe,
|
||||
};
|
||||
|
||||
use crate::hir;
|
||||
|
@ -35,7 +36,7 @@ use crate::util::nodemap::{FxHashMap, FxHashSet};
|
|||
use errors::{Applicability, DiagnosticBuilder};
|
||||
use std::fmt;
|
||||
use syntax::ast;
|
||||
use syntax::symbol::sym;
|
||||
use syntax::symbol::{sym, kw};
|
||||
use syntax_pos::{DUMMY_SP, Span, ExpnKind};
|
||||
|
||||
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||
|
@ -657,19 +658,22 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
span,
|
||||
E0277,
|
||||
"{}",
|
||||
message.unwrap_or_else(||
|
||||
format!("the trait bound `{}` is not satisfied{}",
|
||||
trait_ref.to_predicate(), post_message)
|
||||
));
|
||||
message.unwrap_or_else(|| format!(
|
||||
"the trait bound `{}` is not satisfied{}",
|
||||
trait_ref.to_predicate(),
|
||||
post_message,
|
||||
)));
|
||||
|
||||
let explanation =
|
||||
if obligation.cause.code == ObligationCauseCode::MainFunctionType {
|
||||
"consider using `()`, or a `Result`".to_owned()
|
||||
} else {
|
||||
format!("{}the trait `{}` is not implemented for `{}`",
|
||||
pre_message,
|
||||
trait_ref,
|
||||
trait_ref.self_ty())
|
||||
format!(
|
||||
"{}the trait `{}` is not implemented for `{}`",
|
||||
pre_message,
|
||||
trait_ref,
|
||||
trait_ref.self_ty(),
|
||||
)
|
||||
};
|
||||
|
||||
if let Some(ref s) = label {
|
||||
|
@ -686,6 +690,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
|||
}
|
||||
|
||||
self.suggest_borrow_on_unsized_slice(&obligation.cause.code, &mut err);
|
||||
self.suggest_fn_call(&obligation, &mut err, &trait_ref);
|
||||
self.suggest_remove_reference(&obligation, &mut err, &trait_ref);
|
||||
self.suggest_semicolon_removal(&obligation, &mut err, span, &trait_ref);
|
||||
|
||||
|
@@ -953,6 +958,57 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
}

fn suggest_fn_call(
&self,
obligation: &PredicateObligation<'tcx>,
err: &mut DiagnosticBuilder<'tcx>,
trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
) {
let self_ty = trait_ref.self_ty();
match self_ty.sty {
ty::FnDef(def_id, _) => {
// We tried to apply the bound to an `fn`. Check whether calling it would evaluate
// to a type that *would* satisfy the trait binding. If it would, suggest calling
// it: `bar(foo)` -> `bar(foo())`. This case is *very* likely to be hit if `foo` is
// `async`.
let output_ty = self_ty.fn_sig(self.tcx).output();
let new_trait_ref = ty::TraitRef {
def_id: trait_ref.def_id(),
substs: self.tcx.mk_substs_trait(output_ty.skip_binder(), &[]),
};
let obligation = Obligation::new(
obligation.cause.clone(),
obligation.param_env,
new_trait_ref.to_predicate(),
);
match self.evaluate_obligation(&obligation) {
Ok(EvaluationResult::EvaluatedToOk) |
Ok(EvaluationResult::EvaluatedToOkModuloRegions) |
Ok(EvaluationResult::EvaluatedToAmbig) => {
if let Some(hir::Node::Item(hir::Item {
ident,
node: hir::ItemKind::Fn(.., body_id),
..
})) = self.tcx.hir().get_if_local(def_id) {
let body = self.tcx.hir().body(*body_id);
err.help(&format!(
"use parentheses to call the function: `{}({})`",
ident,
body.params.iter()
.map(|arg| match &arg.pat.node {
hir::PatKind::Binding(_, _, ident, None)
if ident.name != kw::SelfLower => ident.to_string(),
_ => "_".to_string(),
}).collect::<Vec<_>>().join(", ")));
}
}
_ => {}
}
}
_ => {}
}
}

/// Whenever references are used by mistake, like `for (i, e) in &vec.iter().enumerate()`,
/// suggest removing these references until we reach a type that implements the trait.
fn suggest_remove_reference(

@@ -1044,7 +1100,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
node: hir::ExprKind::Closure(_, ref _decl, id, span, _),
..
}) => {
(self.tcx.sess.source_map().def_span(span), self.tcx.hir().body(id).arguments.iter()
(self.tcx.sess.source_map().def_span(span),
self.tcx.hir().body(id).params.iter()
.map(|arg| {
if let hir::Pat {
node: hir::PatKind::Tuple(ref args, _),

@@ -1534,17 +1591,23 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
err.note("only the last element of a tuple may have a dynamically sized type");
}
ObligationCauseCode::ProjectionWf(data) => {
err.note(&format!("required so that the projection `{}` is well-formed",
data));
err.note(&format!(
"required so that the projection `{}` is well-formed",
data,
));
}
ObligationCauseCode::ReferenceOutlivesReferent(ref_ty) => {
err.note(&format!("required so that reference `{}` does not outlive its referent",
ref_ty));
err.note(&format!(
"required so that reference `{}` does not outlive its referent",
ref_ty,
));
}
ObligationCauseCode::ObjectTypeBound(object_ty, region) => {
err.note(&format!("required so that the lifetime bound of `{}` for `{}` \
is satisfied",
region, object_ty));
err.note(&format!(
"required so that the lifetime bound of `{}` for `{}` is satisfied",
region,
object_ty,
));
}
ObligationCauseCode::ItemObligation(item_def_id) => {
let item_name = tcx.def_path_str(item_def_id);

@@ -1552,7 +1615,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {

if let Some(sp) = tcx.hir().span_if_local(item_def_id) {
let sp = tcx.sess.source_map().def_span(sp);
err.span_note(sp, &msg);
err.span_label(sp, &msg);
} else {
err.note(&msg);
}
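Editor's note: the `suggest_fn_call` hunk above makes the compiler check whether *calling* a function item would satisfy a failed trait bound, and if so emits a "use parentheses to call the function" help. Below is a minimal, self-contained sketch of the kind of user code that hits this path; `answer` and `needs_display` are invented names for illustration only, not part of rustc or of this change.

```rust
use std::fmt::Display;

// Hypothetical user code: a function item is passed where a `Display` value is expected.
fn answer() -> i32 {
    42
}

fn needs_display<T: Display>(value: T) {
    println!("{}", value);
}

fn main() {
    // needs_display(answer);
    // ^ would fail: the fn item `answer` does not implement `Display`, but its return
    //   type `i32` does, so the new diagnostic can suggest:
    //   "use parentheses to call the function: `answer()`"
    needs_display(answer());
}
```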
@@ -15,7 +15,6 @@ use super::util;
use crate::hir::def_id::DefId;
use crate::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use crate::mir::interpret::{GlobalId, ConstValue};
use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
use rustc_macros::HashStable;
use syntax::ast::Ident;

@@ -397,40 +396,7 @@ impl<'a, 'b, 'tcx> TypeFolder<'tcx> for AssocTypeNormalizer<'a, 'b, 'tcx> {
}

fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
if let ConstValue::Unevaluated(def_id, substs) = constant.val {
let tcx = self.selcx.tcx().global_tcx();
let param_env = self.param_env;
if !param_env.has_local_value() {
if substs.needs_infer() || substs.has_placeholders() {
let identity_substs = InternalSubsts::identity_for_item(tcx, def_id);
let instance = ty::Instance::resolve(tcx, param_env, def_id, identity_substs);
if let Some(instance) = instance {
let cid = GlobalId {
instance,
promoted: None
};
if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
let evaluated = evaluated.subst(tcx, substs);
return evaluated;
}
}
} else {
if !substs.has_local_value() {
let instance = ty::Instance::resolve(tcx, param_env, def_id, substs);
if let Some(instance) = instance {
let cid = GlobalId {
instance,
promoted: None
};
if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
return evaluated;
}
}
}
}
}
constant
constant.eval(self.selcx.tcx(), self.param_env)
}
}
@@ -5,11 +5,10 @@
use crate::infer::at::At;
use crate::infer::canonical::OriginalQueryValues;
use crate::infer::{InferCtxt, InferOk};
use crate::mir::interpret::{GlobalId, ConstValue};
use crate::traits::project::Normalized;
use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
use crate::ty::fold::{TypeFoldable, TypeFolder};
use crate::ty::subst::{Subst, InternalSubsts};
use crate::ty::subst::Subst;
use crate::ty::{self, Ty, TyCtxt};

use super::NoSolution;

@@ -191,40 +190,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
}

fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
if let ConstValue::Unevaluated(def_id, substs) = constant.val {
let tcx = self.infcx.tcx.global_tcx();
let param_env = self.param_env;
if !param_env.has_local_value() {
if substs.needs_infer() || substs.has_placeholders() {
let identity_substs = InternalSubsts::identity_for_item(tcx, def_id);
let instance = ty::Instance::resolve(tcx, param_env, def_id, identity_substs);
if let Some(instance) = instance {
let cid = GlobalId {
instance,
promoted: None,
};
if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
let evaluated = evaluated.subst(tcx, substs);
return evaluated;
}
}
} else {
if !substs.has_local_value() {
let instance = ty::Instance::resolve(tcx, param_env, def_id, substs);
if let Some(instance) = instance {
let cid = GlobalId {
instance,
promoted: None,
};
if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
return evaluated;
}
}
}
}
}
constant
constant.eval(self.infcx.tcx, self.param_env)
}
}
@@ -3513,7 +3513,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {

// We can only make objects from sized types.
let tr = ty::TraitRef {
def_id: tcx.require_lang_item(lang_items::SizedTraitLangItem),
def_id: tcx.require_lang_item(lang_items::SizedTraitLangItem, None),
substs: tcx.mk_substs_trait(source, &[]),
};
nested.push(predicate_to_obligation(tr.to_predicate()));
@@ -21,7 +21,7 @@ use crate::middle::cstore::EncodedMetadata;
use crate::middle::lang_items;
use crate::middle::resolve_lifetime::{self, ObjectLifetimeDefault};
use crate::middle::stability;
use crate::mir::{Body, interpret, ProjectionKind};
use crate::mir::{Body, interpret, ProjectionKind, Promoted};
use crate::mir::interpret::{ConstValue, Allocation, Scalar};
use crate::ty::subst::{Kind, InternalSubsts, SubstsRef, Subst};
use crate::ty::ReprOptions;

@@ -978,6 +978,7 @@ pub struct FreeRegionInfo {
///
/// [rustc guide]: https://rust-lang.github.io/rustc-guide/ty.html
#[derive(Copy, Clone)]
#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "TyCtxt")]
pub struct TyCtxt<'tcx> {
gcx: &'tcx GlobalCtxt<'tcx>,
}

@@ -1096,6 +1097,16 @@ impl<'tcx> TyCtxt<'tcx> {
self.arena.alloc(Steal::new(mir))
}

pub fn alloc_steal_promoted(self, promoted: IndexVec<Promoted, Body<'tcx>>) ->
&'tcx Steal<IndexVec<Promoted, Body<'tcx>>> {
self.arena.alloc(Steal::new(promoted))
}

pub fn intern_promoted(self, promoted: IndexVec<Promoted, Body<'tcx>>) ->
&'tcx IndexVec<Promoted, Body<'tcx>> {
self.arena.alloc(promoted)
}

pub fn alloc_adt_def(
self,
did: DefId,

@@ -1298,10 +1309,22 @@ impl<'tcx> TyCtxt<'tcx> {
self.get_lib_features(LOCAL_CRATE)
}

/// Obtain all lang items of this crate and all dependencies (recursively)
pub fn lang_items(self) -> &'tcx middle::lang_items::LanguageItems {
self.get_lang_items(LOCAL_CRATE)
}

/// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to
/// compare against another `DefId`, since `is_diagnostic_item` is cheaper.
pub fn get_diagnostic_item(self, name: Symbol) -> Option<DefId> {
self.all_diagnostic_items(LOCAL_CRATE).get(&name).copied()
}

/// Check whether the diagnostic item with the given `name` has the given `DefId`.
pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool {
self.diagnostic_items(did.krate).get(&name) == Some(&did)
}

pub fn stability(self) -> &'tcx stability::Index<'tcx> {
self.stability_index(LOCAL_CRATE)
}

@@ -2375,13 +2398,19 @@ impl<'tcx> TyCtxt<'tcx> {

#[inline]
pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem, None);
self.mk_generic_adt(def_id, ty)
}

#[inline]
pub fn mk_lang_item(self, ty: Ty<'tcx>, item: lang_items::LangItem) -> Ty<'tcx> {
let def_id = self.require_lang_item(item, None);
self.mk_generic_adt(def_id, ty)
}

#[inline]
pub fn mk_maybe_uninit(self, ty: Ty<'tcx>) -> Ty<'tcx> {
let def_id = self.require_lang_item(lang_items::MaybeUninitLangItem);
let def_id = self.require_lang_item(lang_items::MaybeUninitLangItem, None);
self.mk_generic_adt(def_id, ty)
}

@@ -2886,6 +2915,14 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) {
assert_eq!(id, LOCAL_CRATE);
tcx.arena.alloc(middle::lang_items::collect(tcx))
};
providers.diagnostic_items = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
middle::diagnostic_items::collect(tcx)
};
providers.all_diagnostic_items = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
middle::diagnostic_items::collect_all(tcx)
};
providers.maybe_unused_trait_import = |tcx, id| {
tcx.maybe_unused_trait_imports.contains(&id)
};
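Editor's note: the `rustc_diagnostic_item` attribute and the `get_diagnostic_item`/`is_diagnostic_item` helpers above let diagnostics refer to well-known items by a stable name instead of hard-coded paths. The toy sketch below uses invented `Registry`/`DefId` types, not rustc's, to show the look-up-by-name, compare-by-id idea under those assumptions.

```rust
use std::collections::HashMap;

type DefId = u32; // stand-in for rustc's DefId

struct Registry {
    diagnostic_items: HashMap<&'static str, DefId>,
}

impl Registry {
    // Analogue of `TyCtxt::get_diagnostic_item`: look an item up by its stable name.
    fn get_diagnostic_item(&self, name: &str) -> Option<DefId> {
        self.diagnostic_items.get(name).copied()
    }

    // Analogue of `TyCtxt::is_diagnostic_item`: cheap comparison against a known DefId.
    fn is_diagnostic_item(&self, name: &str, did: DefId) -> bool {
        self.diagnostic_items.get(name) == Some(&did)
    }
}

fn main() {
    let registry = Registry {
        diagnostic_items: HashMap::from([("TyCtxt", 1), ("Ty", 2)]),
    };
    assert_eq!(registry.get_diagnostic_item("TyCtxt"), Some(1));
    assert!(registry.is_diagnostic_item("Ty", 2));
    println!("diagnostic item lookups ok");
}
```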
@@ -46,6 +46,8 @@ pub enum TypeError<'tcx> {
ExistentialMismatch(ExpectedFound<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>),

ConstMismatch(ExpectedFound<&'tcx ty::Const<'tcx>>),

IntrinsicCast,
}

#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]

@@ -179,6 +181,9 @@ impl<'tcx> fmt::Display for TypeError<'tcx> {
ConstMismatch(ref values) => {
write!(f, "expected `{}`, found `{}`", values.expected, values.found)
}
IntrinsicCast => {
write!(f, "cannot coerce intrinsics to function pointers")
}
}
}
}

@@ -247,13 +252,15 @@ impl<'tcx> ty::TyS<'tcx> {
}

impl<'tcx> TyCtxt<'tcx> {
pub fn note_and_explain_type_err(self,
db: &mut DiagnosticBuilder<'_>,
err: &TypeError<'tcx>,
sp: Span) {
pub fn note_and_explain_type_err(
self,
db: &mut DiagnosticBuilder<'_>,
err: &TypeError<'tcx>,
sp: Span,
) {
use self::TypeError::*;

match err.clone() {
match err {
Sorts(values) => {
let expected_str = values.expected.sort_string(self);
let found_str = values.found.sort_string(self);

@@ -261,6 +268,16 @@ impl<'tcx> TyCtxt<'tcx> {
db.note("no two closures, even if identical, have the same type");
db.help("consider boxing your closure and/or using it as a trait object");
}
if expected_str == found_str && expected_str == "opaque type" { // Issue #63167
db.note("distinct uses of `impl Trait` result in different opaque types");
let e_str = values.expected.to_string();
let f_str = values.found.to_string();
if &e_str == &f_str && &e_str == "impl std::future::Future" {
// FIXME: use non-string based check.
db.help("if both `Future`s have the same `Output` type, consider \
`.await`ing on both of them");
}
}
if let (ty::Infer(ty::IntVar(_)), ty::Float(_)) =
(&values.found.sty, &values.expected.sty) // Issue #53280
{
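Editor's note: the hunk above adds a note for mismatches between two opaque types and, when both sides print as `impl std::future::Future`, a hint to `.await` them. A small illustrative example follows (edition 2018 or later, made-up function names); the commented-out lines are the kind of code the new note targets.

```rust
// Each `async fn` has its own opaque `impl Future` type, even when the `Output` types match.
async fn first() -> u8 {
    1
}

async fn second() -> u8 {
    2
}

async fn run() -> u8 {
    // let mut fut = first();
    // fut = second();
    // ^ would fail: distinct opaque types; the new help suggests `.await`ing both futures,
    //   since their `Output` types are identical.
    first().await + second().await
}

fn main() {
    // Driving `run()` needs an executor (e.g. a runtime crate); here it is only constructed.
    let _future = run();
}
```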
@@ -327,7 +327,7 @@ impl<'tcx> Instance<'tcx> {
}

pub fn resolve_drop_in_place(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ty::Instance<'tcx> {
let def_id = tcx.require_lang_item(DropInPlaceFnLangItem);
let def_id = tcx.require_lang_item(DropInPlaceFnLangItem, None);
let substs = tcx.intern_substs(&[ty.into()]);
Instance::resolve(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap()
}
@@ -273,14 +273,12 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
repr: &ReprOptions,
kind: StructKind) -> Result<LayoutDetails, LayoutError<'tcx>> {
let dl = self.data_layout();
let packed = repr.packed();
if packed && repr.align > 0 {
let pack = repr.pack;
if pack.is_some() && repr.align.is_some() {
bug!("struct cannot be packed and aligned");
}

let pack = Align::from_bytes(repr.pack as u64).unwrap();

let mut align = if packed {
let mut align = if pack.is_some() {
dl.i8_align
} else {
dl.aggregate_align

@@ -303,7 +301,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
};
let optimizing = &mut inverse_memory_index[..end];
let field_align = |f: &TyLayout<'_>| {
if packed { f.align.abi.min(pack) } else { f.align.abi }
if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi }
};
match kind {
StructKind::AlwaysSized |

@@ -334,7 +332,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
let mut largest_niche_available = 0;

if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
let prefix_align = if packed {
let prefix_align = if let Some(pack) = pack {
prefix_align.min(pack)
} else {
prefix_align

@@ -355,7 +353,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
}

// Invariant: offset < dl.obj_size_bound() <= 1<<61
let field_align = if packed {
let field_align = if let Some(pack) = pack {
field.align.min(AbiAndPrefAlign::new(pack))
} else {
field.align

@@ -379,10 +377,8 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
.ok_or(LayoutError::SizeOverflow(ty))?;
}

if repr.align > 0 {
let repr_align = repr.align as u64;
align = align.max(AbiAndPrefAlign::new(Align::from_bytes(repr_align).unwrap()));
debug!("univariant repr_align: {:?}", repr_align);
if let Some(repr_align) = repr.align {
align = align.max(AbiAndPrefAlign::new(repr_align));
}

debug!("univariant min_size: {:?}", offset);

@@ -730,23 +726,18 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
}).collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

if def.is_union() {
let packed = def.repr.packed();
if packed && def.repr.align > 0 {
bug!("Union cannot be packed and aligned");
if def.repr.pack.is_some() && def.repr.align.is_some() {
bug!("union cannot be packed and aligned");
}

let pack = Align::from_bytes(def.repr.pack as u64).unwrap();

let mut align = if packed {
let mut align = if def.repr.pack.is_some() {
dl.i8_align
} else {
dl.aggregate_align
};

if def.repr.align > 0 {
let repr_align = def.repr.align as u64;
align = align.max(
AbiAndPrefAlign::new(Align::from_bytes(repr_align).unwrap()));
if let Some(repr_align) = def.repr.align {
align = align.max(AbiAndPrefAlign::new(repr_align));
}

let optimize = !def.repr.inhibit_union_abi_opt();

@@ -755,13 +746,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
let index = VariantIdx::new(0);
for field in &variants[index] {
assert!(!field.is_unsized());

let field_align = if packed {
field.align.min(AbiAndPrefAlign::new(pack))
} else {
field.align
};
align = align.max(field_align);
align = align.max(field.align);

// If all non-ZST fields have the same ABI, forward this ABI
if optimize && !field.is_zst() {

@@ -796,6 +781,10 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
size = cmp::max(size, field.size);
}

if let Some(pack) = def.repr.pack {
align = align.min(AbiAndPrefAlign::new(pack));
}

return Ok(tcx.intern_layout(LayoutDetails {
variants: Variants::Single { index },
fields: FieldPlacement::Union(variants[index].len()),

@@ -1637,7 +1626,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
};

let adt_kind = adt_def.adt_kind();
let adt_packed = adt_def.repr.packed();
let adt_packed = adt_def.repr.pack.is_some();

let build_variant_info = |n: Option<Ident>,
flds: &[ast::Name],
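Editor's note: in the union layout hunks above, the per-field clamping by the pack value is replaced by taking the max of the raw field alignments and clamping once at the end. The tiny sketch below reproduces that arithmetic with plain integers; the aggregate baseline of 8 bytes is an assumption for illustration and this is not rustc's real `Align` type.

```rust
// Alignment of a union under an optional `#[repr(packed(N))]`, mirroring the updated order:
// start from the packed/aggregate baseline, max in the raw field alignments, clamp by pack last.
fn union_align(field_aligns: &[u64], pack: Option<u64>) -> u64 {
    let mut align = if pack.is_some() { 1 } else { 8 }; // i8 baseline vs. assumed aggregate baseline
    for &field_align in field_aligns {
        align = align.max(field_align);
    }
    if let Some(pack) = pack {
        align = align.min(pack);
    }
    align
}

fn main() {
    assert_eq!(union_align(&[4, 8], None), 8);
    assert_eq!(union_align(&[4, 8], Some(2)), 2); // packed(2) caps the union's alignment
    println!("union_align checks passed");
}
```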
@@ -33,6 +33,7 @@ use arena::SyncDroplessArena;
use crate::session::DataTypeKind;

use rustc_serialize::{self, Encodable, Encoder};
use rustc_target::abi::Align;
use std::cell::RefCell;
use std::cmp::{self, Ordering};
use std::fmt;

@@ -580,6 +581,7 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::TyS<'tcx> {
}
}

#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "Ty")]
pub type Ty<'tcx> = &'tcx TyS<'tcx>;

impl<'tcx> rustc_serialize::UseSpecializedEncodable for Ty<'tcx> {}

@@ -2057,8 +2059,8 @@ impl_stable_hash_for!(struct ReprFlags {
#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Default)]
pub struct ReprOptions {
pub int: Option<attr::IntType>,
pub align: u32,
pub pack: u32,
pub align: Option<Align>,
pub pack: Option<Align>,
pub flags: ReprFlags,
}

@@ -2073,18 +2075,19 @@ impl ReprOptions {
pub fn new(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions {
let mut flags = ReprFlags::empty();
let mut size = None;
let mut max_align = 0;
let mut min_pack = 0;
let mut max_align: Option<Align> = None;
let mut min_pack: Option<Align> = None;
for attr in tcx.get_attrs(did).iter() {
for r in attr::find_repr_attrs(&tcx.sess.parse_sess, attr) {
flags.insert(match r {
attr::ReprC => ReprFlags::IS_C,
attr::ReprPacked(pack) => {
min_pack = if min_pack > 0 {
cmp::min(pack, min_pack)
let pack = Align::from_bytes(pack as u64).unwrap();
min_pack = Some(if let Some(min_pack) = min_pack {
min_pack.min(pack)
} else {
pack
};
});
ReprFlags::empty()
},
attr::ReprTransparent => ReprFlags::IS_TRANSPARENT,

@@ -2094,7 +2097,7 @@ impl ReprOptions {
ReprFlags::empty()
},
attr::ReprAlign(align) => {
max_align = cmp::max(align, max_align);
max_align = max_align.max(Some(Align::from_bytes(align as u64).unwrap()));
ReprFlags::empty()
},
});

@@ -2113,7 +2116,7 @@ impl ReprOptions {
#[inline]
pub fn c(&self) -> bool { self.flags.contains(ReprFlags::IS_C) }
#[inline]
pub fn packed(&self) -> bool { self.pack > 0 }
pub fn packed(&self) -> bool { self.pack.is_some() }
#[inline]
pub fn transparent(&self) -> bool { self.flags.contains(ReprFlags::IS_TRANSPARENT) }
#[inline]

@@ -2133,8 +2136,12 @@ impl ReprOptions {
/// Returns `true` if this `#[repr()]` should inhibit struct field reordering
/// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 ||
self.int.is_some()
if let Some(pack) = self.pack {
if pack.bytes() == 1 {
return true;
}
}
self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
}

/// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.

@@ -2588,12 +2595,12 @@ impl<'tcx> ClosureKind {

pub fn trait_did(&self, tcx: TyCtxt<'tcx>) -> DefId {
match *self {
ClosureKind::Fn => tcx.require_lang_item(FnTraitLangItem),
ClosureKind::Fn => tcx.require_lang_item(FnTraitLangItem, None),
ClosureKind::FnMut => {
tcx.require_lang_item(FnMutTraitLangItem)
tcx.require_lang_item(FnMutTraitLangItem, None)
}
ClosureKind::FnOnce => {
tcx.require_lang_item(FnOnceTraitLangItem)
tcx.require_lang_item(FnOnceTraitLangItem, None)
}
}
}
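Editor's note: the `ReprOptions` hunks above replace the `u32` sentinel encoding (where 0 meant "unset") for `pack` and `align` with `Option<Align>`, making the merging rules explicit: `min` for repeated `packed(N)`, `max` for repeated `align(N)`. A minimal sketch of that pattern follows, using a made-up `Align` newtype rather than `rustc_target::abi::Align`.

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Align(u64); // stand-in: alignment in bytes

#[derive(Default)]
struct ReprOptions {
    pack: Option<Align>,
    align: Option<Align>,
}

impl ReprOptions {
    fn packed(&self) -> bool {
        // Previously `self.pack > 0`; with Option the intent is direct.
        self.pack.is_some()
    }

    fn add_packed(&mut self, pack: Align) {
        // `repr(packed(N))` attributes combine by keeping the smallest pack value.
        self.pack = Some(self.pack.map_or(pack, |p| p.min(pack)));
    }

    fn add_align(&mut self, align: Align) {
        // `repr(align(N))` attributes combine by keeping the largest alignment.
        self.align = self.align.max(Some(align));
    }
}

fn main() {
    let mut repr = ReprOptions::default();
    assert!(!repr.packed());
    repr.add_packed(Align(4));
    repr.add_packed(Align(2));
    repr.add_align(Align(8));
    assert_eq!(repr.pack, Some(Align(2)));
    assert_eq!(repr.align, Some(Align(8)));
    println!("repr option merging ok");
}
```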
@@ -944,10 +944,16 @@ pub trait PrettyPrinter<'tcx>:
.get_bytes(&self.tcx(), ptr, Size::from_bytes(n)).unwrap())
},
(ConstValue::Slice { data, start, end }, ty::Slice(t)) if *t == u8 => {
Some(&data.bytes[start..end])
// The `inspect` here is okay since we checked the bounds, and there are no
// relocations (we have an active slice reference here). We don't use this
// result to affect interpreter execution.
Some(data.inspect_with_undef_and_ptr_outside_interpreter(start..end))
},
(ConstValue::Slice { data, start, end }, ty::Str) => {
let slice = &data.bytes[start..end];
// The `inspect` here is okay since we checked the bounds, and there are no
// relocations (we have an active `str` reference here). We don't use this
// result to affect interpreter execution.
let slice = data.inspect_with_undef_and_ptr_outside_interpreter(start..end);
let s = ::std::str::from_utf8(slice)
.expect("non utf8 str from miri");
p!(write("{:?}", s));
@@ -20,10 +20,10 @@ use rustc_data_structures::thin_vec::ThinVec;
use rustc_data_structures::sync::{Lrc, Lock, HashMapExt, Once};
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use std::mem;
use syntax::ast::NodeId;
use syntax::ast::{Ident, NodeId};
use syntax::source_map::{SourceMap, StableSourceFileId};
use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile};
use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnData};
use syntax_pos::hygiene::{ExpnId, SyntaxContext};

const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;

@@ -591,10 +591,11 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
// FIXME(mw): This method does not restore `ExpnData::parent` or
// `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things
// don't seem to be used after HIR lowering, so everything should be fine
// as long as incremental compilation does not kick in before that.
// until we want incremental compilation to serialize Spans that we need
// full hygiene information for.
let location = || Span::with_root_ctxt(lo, hi);
let recover_from_expn_data = |this: &Self, expn_data, pos| {
let span = location().fresh_expansion(expn_data);
let recover_from_expn_data = |this: &Self, expn_data, transparency, pos| {
let span = location().fresh_expansion_with_transparency(expn_data, transparency);
this.synthetic_syntax_contexts.borrow_mut().insert(pos, span.ctxt());
span
};

@@ -603,9 +604,9 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
location()
}
TAG_EXPN_DATA_INLINE => {
let expn_data = Decodable::decode(self)?;
let (expn_data, transparency) = Decodable::decode(self)?;
recover_from_expn_data(
self, expn_data, AbsoluteBytePos::new(self.opaque.position())
self, expn_data, transparency, AbsoluteBytePos::new(self.opaque.position())
)
}
TAG_EXPN_DATA_SHORTHAND => {

@@ -614,9 +615,9 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
if let Some(ctxt) = cached_ctxt {
Span::new(lo, hi, ctxt)
} else {
let expn_data =
self.with_position(pos.to_usize(), |this| ExpnData::decode(this))?;
recover_from_expn_data(self, expn_data, pos)
let (expn_data, transparency) =
self.with_position(pos.to_usize(), |this| Decodable::decode(this))?;
recover_from_expn_data(self, expn_data, transparency, pos)
}
}
_ => {

@@ -626,6 +627,13 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
}
}

impl<'a, 'tcx> SpecializedDecoder<Ident> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<Ident, Self::Error> {
// FIXME: Handle hygiene in incremental
bug!("Trying to decode Ident for incremental");
}
}

// This impl makes sure that we get a runtime error when we try decode a
// DefIndex that is not contained in a DefId. Such a case would be problematic
// because we would not know how to transform the DefIndex to the current

@@ -819,7 +827,7 @@ where
if span_data.ctxt == SyntaxContext::root() {
TAG_NO_EXPN_DATA.encode(self)
} else {
let (expn_id, expn_data) = span_data.ctxt.outer_expn_with_data();
let (expn_id, transparency, expn_data) = span_data.ctxt.outer_mark_with_data();
if let Some(pos) = self.expn_data_shorthands.get(&expn_id).cloned() {
TAG_EXPN_DATA_SHORTHAND.encode(self)?;
pos.encode(self)

@@ -827,12 +835,25 @@ where
TAG_EXPN_DATA_INLINE.encode(self)?;
let pos = AbsoluteBytePos::new(self.position());
self.expn_data_shorthands.insert(expn_id, pos);
expn_data.encode(self)
(expn_data, transparency).encode(self)
}
}
}
}

impl<'a, 'tcx, E> SpecializedEncoder<Ident> for CacheEncoder<'a, 'tcx, E>
where
E: 'a + ty_codec::TyEncoder,
{
fn specialized_encode(&mut self, _: &Ident) -> Result<(), Self::Error> {
// We don't currently encode enough information to ensure hygiene works
// with incremental, so panic rather than risk incremental bugs.

// FIXME: Handle hygiene in incremental
bug!("Trying to encode Ident for incremental")
}
}

impl<'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'a, 'tcx, E>
where
E: 'a + ty_codec::TyEncoder,
@@ -8,7 +8,7 @@ use crate::hir::def_id::DefId;
use crate::ty::subst::{Kind, UnpackedKind, SubstsRef};
use crate::ty::{self, Ty, TyCtxt, TypeFoldable};
use crate::ty::error::{ExpectedFound, TypeError};
use crate::mir::interpret::{ConstValue, Scalar, GlobalId};
use crate::mir::interpret::{ConstValue, Scalar};
use std::rc::Rc;
use std::iter;
use rustc_target::spec::abi;

@@ -551,26 +551,8 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
let tcx = relation.tcx();

let eagerly_eval = |x: &'tcx ty::Const<'tcx>| {
if let ConstValue::Unevaluated(def_id, substs) = x.val {
// FIXME(eddyb) get the right param_env.
let param_env = ty::ParamEnv::empty();
if !substs.has_local_value() {
let instance = ty::Instance::resolve(
tcx.global_tcx(),
param_env,
def_id,
substs,
);
if let Some(instance) = instance {
let cid = GlobalId {
instance,
promoted: None,
};
if let Ok(ct) = tcx.const_eval(param_env.and(cid)) {
return ct.val;
}
}
}
if !x.val.has_local_value() {
return x.eval(tcx, relation.param_env()).val;
}
x.val
};
@@ -748,6 +748,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> {
Sorts(ref x) => return tcx.lift(x).map(Sorts),
ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch),
ConstMismatch(ref x) => return tcx.lift(x).map(ConstMismatch),
IntrinsicCast => IntrinsicCast,
})
}
}

@@ -1338,6 +1339,7 @@ EnumTypeFoldableImpl! {
(ty::error::TypeError::Sorts)(x),
(ty::error::TypeError::ExistentialMismatch)(x),
(ty::error::TypeError::ConstMismatch)(x),
(ty::error::TypeError::IntrinsicCast),
}
}
@@ -86,6 +86,7 @@ impl BoundRegion {
/// AST structure in `libsyntax/ast.rs` as well.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash,
RustcEncodable, RustcDecodable, HashStable, Debug)]
#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "TyKind")]
pub enum TyKind<'tcx> {
/// The primitive boolean type. Written as `bool`.
Bool,

@@ -385,7 +386,7 @@ impl<'tcx> ClosureSubsts<'tcx> {
let ty = self.closure_sig_ty(def_id, tcx);
match ty.sty {
ty::FnPtr(sig) => sig,
_ => bug!("closure_sig_ty is not a fn-ptr: {:?}", ty),
_ => bug!("closure_sig_ty is not a fn-ptr: {:?}", ty.sty),
}
}
}

@@ -2299,23 +2300,33 @@ impl<'tcx> Const<'tcx> {
assert_eq!(self.ty, ty);
// if `ty` does not depend on generic parameters, use an empty param_env
let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size;
self.eval(tcx, param_env).val.try_to_bits(size)
}

#[inline]
pub fn eval(
&self,
tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,
) -> &Const<'tcx> {
// FIXME(const_generics): this doesn't work right now,
// because it tries to relate an `Infer` to a `Param`.
match self.val {
// FIXME(const_generics): this doesn't work right now,
// because it tries to relate an `Infer` to a `Param`.
ConstValue::Unevaluated(did, substs) => {
// if `substs` has no unresolved components, use an empty param_env
let (param_env, substs) = param_env.with_reveal_all().and(substs).into_parts();
// try to resolve e.g. associated constants to their definition on an impl
let instance = ty::Instance::resolve(tcx, param_env, did, substs)?;
let instance = match ty::Instance::resolve(tcx, param_env, did, substs) {
Some(instance) => instance,
None => return self,
};
let gid = GlobalId {
instance,
promoted: None,
};
let evaluated = tcx.const_eval(param_env.and(gid)).ok()?;
evaluated.val.try_to_bits(size)
tcx.const_eval(param_env.and(gid)).unwrap_or(self)
},
// otherwise just extract a `ConstValue`'s bits if possible
_ => self.val.try_to_bits(size),
_ => self,
}
}
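Editor's note: the new `Const::eval` above either resolves and evaluates an unevaluated constant or hands back the original value unchanged (`unwrap_or(self)`), which is what lets the normalizers earlier in this diff collapse to a single `constant.eval(...)` call. The sketch below models that contract with stand-in types; nothing in it is rustc's real API.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum ConstVal {
    Unevaluated(u32), // stand-in for `ConstValue::Unevaluated(def_id, substs)`
    Evaluated(i64),
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct Const {
    val: ConstVal,
}

impl Const {
    // Try to resolve and evaluate; fall back to the original constant on failure,
    // mirroring the `unwrap_or(self)` in the real method.
    fn eval(self, lookup: impl Fn(u32) -> Option<i64>) -> Const {
        match self.val {
            ConstVal::Unevaluated(id) => match lookup(id) {
                Some(v) => Const { val: ConstVal::Evaluated(v) },
                None => self,
            },
            _ => self,
        }
    }
}

fn main() {
    // A toy "const_eval" table standing in for the resolve + evaluate step.
    let table = |id: u32| if id == 7 { Some(42) } else { None };
    assert_eq!(Const { val: ConstVal::Unevaluated(7) }.eval(table).val, ConstVal::Evaluated(42));
    assert_eq!(Const { val: ConstVal::Unevaluated(9) }.eval(table).val, ConstVal::Unevaluated(9));
    println!("const eval contract ok");
}
```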
Some files were not shown because too many files have changed in this diff.