Merge branch 'master' into future_imports
Commit bd8497884c
341 changed files with 8991 additions and 4050 deletions
@@ -16,6 +16,7 @@ Read ["Installation"] from [The Book].
 ## Building from Source
 [building-from-source]: #building-from-source
 
+### Building on *nix
 1. Make sure you have installed the dependencies:
 
 * `g++` 4.7 or later or `clang++` 3.x or later
@@ -193,7 +194,7 @@ Snapshot binaries are currently built and tested on several platforms:
 You may find that other platforms work, but these are our officially
 supported build environments that are most likely to work.
 
-Rust currently needs between 600MiB and 1.5GiB to build, depending on platform.
+Rust currently needs between 600MiB and 1.5GiB of RAM to build, depending on platform.
 If it hits swap, it will take a very long time to build.
 
 There is more advice about hacking on Rust in [CONTRIBUTING.md].
RELEASES.md (12 changed lines)

@@ -3,12 +3,6 @@ Version 1.21.0 (2017-10-12)
 
 Language
 --------
-- [Relaxed path syntax. You can now add type parameters to values][43540]
-Example:
-```rust
-my_macro!(Vec<i32>::new); // Always worked
-my_macro!(Vec::<i32>::new); // Now works
-```
 - [You can now use static references for literals.][43838]
 Example:
 ```rust
@@ -16,6 +10,12 @@ Language
 let x: &'static u32 = &0;
 }
 ```
+- [Relaxed path syntax. Optional `::` before `<` is now allowed in all contexts.][43540]
+Example:
+```rust
+my_macro!(Vec<i32>::new); // Always worked
+my_macro!(Vec::<i32>::new); // Now works
+```
 
 Compiler
 --------
@@ -250,14 +250,11 @@
 # Whether or not `panic!`s generate backtraces (RUST_BACKTRACE)
 #backtrace = true
 
-# The default linker that will be used by the generated compiler. Note that this
-# is not the linker used to link said compiler.
+# The default linker that will be hard-coded into the generated compiler for
+# targets that don't specify linker explicitly in their target specifications.
+# Note that this is not the linker used to link said compiler.
 #default-linker = "cc"
 
-# The default ar utility that will be used by the generated compiler if LLVM
-# cannot be used. Note that this is not used to assemble said compiler.
-#default-ar = "ar"
-
 # The "channel" for the Rust build to produce. The stable/beta channels only
 # allow using stable features, whereas the nightly and dev channels allow using
 # nightly features
@@ -303,7 +300,7 @@
 # =============================================================================
 [target.x86_64-unknown-linux-gnu]
 
-# C compiler to be used to compiler C code and link Rust code. Note that the
+# C compiler to be used to compiler C code. Note that the
 # default value is platform specific, and if not specified it may also depend on
 # what platform is crossing to what platform.
 #cc = "cc"
@@ -312,6 +309,15 @@
 # This is only used for host targets.
 #cxx = "c++"
 
+# Archiver to be used to assemble static libraries compiled from C/C++ code.
+# Note: an absolute path should be used, otherwise LLVM build will break.
+#ar = "ar"
+
+# Linker to be used to link Rust code. Note that the
+# default value is platform specific, and if not specified it may also depend on
+# what platform is crossing to what platform.
+#linker = "cc"
+
 # Path to the `llvm-config` binary of the installation of a custom LLVM to link
 # against. Note that if this is specifed we don't compile LLVM at all for this
 # target.
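For concreteness, a hedged sketch of how the per-target `ar` and `linker` keys added above could be used for a cross-compilation target (the triple and tool paths below are hypothetical illustrations, not part of this commit):

```toml
# Hypothetical cross target; values depend on your toolchain layout.
[target.aarch64-unknown-linux-gnu]
cc = "aarch64-linux-gnu-gcc"
# As the comment above advises, prefer an absolute path for the archiver.
ar = "/usr/bin/aarch64-linux-gnu-ar"
linker = "aarch64-linux-gnu-gcc"
```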
src/Cargo.lock (generated, 47 changed lines)

@@ -42,7 +42,7 @@ dependencies = [
 "alloc 0.0.0",
 "alloc_system 0.0.0",
 "build_helper 0.1.0",
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "core 0.0.0",
 "libc 0.0.0",
 ]
@@ -99,7 +99,7 @@ name = "backtrace-sys"
 version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -136,7 +136,7 @@ name = "bootstrap"
 version = "0.0.0"
 dependencies = [
 "build_helper 0.1.0",
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "cmake 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)",
 "filetime 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
 "getopts 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -199,7 +199,7 @@ dependencies = [
 "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"jobserver 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 "libgit2-sys 0.6.15 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -246,7 +246,7 @@ version = "0.1.0"
 
 [[package]]
 name = "cc"
-version = "1.0.0"
+version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -275,7 +275,7 @@ name = "cmake"
 version = "0.1.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -290,7 +290,7 @@ dependencies = [
 name = "compiler_builtins"
 version = "0.0.0"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "core 0.0.0",
 ]
 
@@ -407,7 +407,7 @@ name = "curl-sys"
 version = "0.3.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 "libz-sys 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -759,7 +759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "jobserver"
-version = "0.1.6"
+version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -833,7 +833,7 @@ name = "libgit2-sys"
 version = "0.6.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "cmake 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)",
 "curl-sys 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -860,7 +860,7 @@ name = "libz-sys"
 version = "1.0.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -880,7 +880,7 @@ name = "lzma-sys"
 version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "filetime 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -968,7 +968,7 @@ name = "miniz-sys"
 version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1104,7 +1104,7 @@ name = "openssl-sys"
 version = "0.9.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
 "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1208,7 +1208,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "profiler_builtins"
 version = "0.0.0"
 dependencies = [
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "core 0.0.0",
 ]
 
@@ -1429,7 +1429,7 @@ dependencies = [
 "flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
 "fmt_macros 0.0.0",
 "graphviz 0.0.0",
-"jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"jobserver 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc_back 0.0.0",
@@ -1504,6 +1504,7 @@ dependencies = [
 "graphviz 0.0.0",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc 0.0.0",
+"rustc_back 0.0.0",
 "rustc_errors 0.0.0",
 "rustc_mir 0.0.0",
 "syntax 0.0.0",
@@ -1611,7 +1612,6 @@ version = "0.0.0"
 dependencies = [
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc 0.0.0",
-"rustc_back 0.0.0",
 "rustc_const_eval 0.0.0",
 "syntax 0.0.0",
 "syntax_pos 0.0.0",
@@ -1623,7 +1623,7 @@ version = "0.0.0"
 dependencies = [
 "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "build_helper 0.1.0",
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc_cratesio_shim 0.0.0",
 ]
 
@@ -1755,9 +1755,9 @@ name = "rustc_trans"
 version = "0.0.0"
 dependencies = [
 "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-"jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"jobserver 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1824,7 +1824,7 @@ name = "rustdoc"
 version = "0.0.0"
 dependencies = [
 "build_helper 0.1.0",
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "html-diff 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1994,7 +1994,6 @@ dependencies = [
 "alloc_jemalloc 0.0.0",
 "alloc_system 0.0.0",
 "build_helper 0.1.0",
-"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "collections 0.0.0",
 "compiler_builtins 0.0.0",
 "core 0.0.0",
@@ -2485,7 +2484,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
 "checksum bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5cde24d1b2e2216a726368b2363a273739c91f4e3eb4e0dd12d672d396ad989"
 "checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32"
-"checksum cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7db2f146208d7e0fbee761b09cd65a7f51ccc38705d4e7262dad4d73b12a76b1"
+"checksum cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2c674f0870e3dbd4105184ea035acb1c32c8ae69939c9e228d2b11bbfe29efad"
 "checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
 "checksum clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3451e409013178663435d6f15fdb212f14ee4424a3d74f979d081d0a66b6f1f2"
 "checksum cmake 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "357c07e7a1fc95732793c1edb5901e1a1f305cfcf63a90eb12dbd22bdb6b789d"
@@ -2530,7 +2529,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
 "checksum ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b3fcaf2365eb14b28ec7603c98c06cc531f19de9eb283d89a3dff8417c8c99f5"
 "checksum itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8324a32baf01e2ae060e9de58ed0bc2320c9a2833491ee36cd3b4c414de4db8c"
-"checksum jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "443ae8bc0af6c106e6e8b77e04684faecc1a5ce94e058f4c2b0a037b0ea1b133"
+"checksum jobserver 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "094f87ed101b6832def8632f43db43dc204d27897eb95aca69b26ce2e4011e84"
 "checksum jsonrpc-core 7.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1acd0f9934da94466d2370f36832b9b19271b4abdfdb5e69f0bcd991ebcd515"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum kuchiki 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ef2ea4f2f7883cd7c6772b06c14abca01a2cc1f75c426cebffcf6b3b925ef9fc"
@@ -34,7 +34,7 @@ cmake = "0.1.23"
 filetime = "0.1"
 num_cpus = "1.0"
 getopts = "0.2"
-cc = "1.0"
+cc = "1.0.1"
 libc = "0.2"
 serde = "1.0.8"
 serde_derive = "1.0.8"
@@ -39,7 +39,7 @@ The script accepts commands, flags, and arguments to determine what to do:
 ```
 
 If files are dirty that would normally be rebuilt from stage 0, that can be
-overidden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
+overridden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
 that belong to stage n or earlier:
 
 ```
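As a hedged illustration of the `--keep-stage` wording above (the exact command inside the elided code block is not shown in this hunk, so the invocation below is an assumed example):

```sh
# Assumed example: reuse stage 0 artifacts even if their inputs look dirty,
# skipping every step that belongs to stage 0 or earlier.
./x.py build --stage 1 --keep-stage 0
```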
@@ -126,7 +126,7 @@ install a nightly, presumably using `rustup`. You will then want to
 configure your directory to use this build, like so:
 
 ```
-# configure to use local rust instead of downloding a beta.
+# configure to use local rust instead of downloading a beta.
 # `--local-rust-root` is optional here. If elided, we will
 # use whatever rustc we find on your PATH.
 > configure --enable-rustbuild --local-rust-root=~/.cargo/ --enable-local-rebuild
@@ -31,8 +31,6 @@ extern crate bootstrap;
 
 use std::env;
 use std::ffi::OsString;
-use std::io;
-use std::io::prelude::*;
 use std::str::FromStr;
 use std::path::PathBuf;
 use std::process::{Command, ExitStatus};
@@ -122,19 +120,14 @@ fn main() {
 cmd.arg("-L").arg(&root);
 }
 
-// Pass down extra flags, commonly used to configure `-Clinker` when
-// cross compiling.
-if let Ok(s) = env::var("RUSTC_FLAGS") {
-cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
+// Override linker if necessary.
+if let Ok(target_linker) = env::var("RUSTC_TARGET_LINKER") {
+cmd.arg(format!("-Clinker={}", target_linker));
 }
 
 // Pass down incremental directory, if any.
 if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
 cmd.arg(format!("-Zincremental={}", dir));
-
-if verbose > 0 {
-cmd.arg("-Zincremental-info");
-}
 }
 
 let crate_name = args.windows(2)
@@ -258,6 +251,11 @@ fn main() {
 if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() {
 cmd.arg("-Z").arg("force-unstable-if-unmarked");
 }
+} else {
+// Override linker if necessary.
+if let Ok(host_linker) = env::var("RUSTC_HOST_LINKER") {
+cmd.arg(format!("-Clinker={}", host_linker));
+}
 }
 
 let color = match env::var("RUSTC_COLOR") {
@@ -270,7 +268,7 @@ fn main() {
 }
 
 if verbose > 1 {
-writeln!(&mut io::stderr(), "rustc command: {:?}", cmd).unwrap();
+eprintln!("rustc command: {:?}", cmd);
 }
 
 // Actually run the compiler!
@@ -47,6 +47,17 @@ fn main() {
 if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() {
 cmd.arg("-Z").arg("force-unstable-if-unmarked");
 }
+if let Some(linker) = env::var_os("RUSTC_TARGET_LINKER") {
+cmd.arg("--linker").arg(linker).arg("-Z").arg("unstable-options");
+}
+
+// Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick
+// it up so we can make rustdoc print this into the docs
+if let Some(version) = env::var_os("RUSTDOC_CRATE_VERSION") {
+// This "unstable-options" can be removed when `--crate-version` is stabilized
+cmd.arg("-Z").arg("unstable-options")
+.arg("--crate-version").arg(version);
+}
 
 std::process::exit(match cmd.status() {
 Ok(s) => s.code().unwrap_or(1),
@@ -302,6 +302,7 @@ def default_build_triple():
 
 return "{}-{}".format(cputype, ostype)
 
+
 class RustBuild(object):
 """Provide all the methods required to build Rust"""
 def __init__(self):
@@ -498,7 +499,7 @@ class RustBuild(object):
 
 If the key does not exists, the result is None:
 
->>> rb.get_toml("key3") == None
+>>> rb.get_toml("key3") is None
 True
 """
 for line in self.config_toml.splitlines():
@@ -413,12 +413,15 @@ impl<'a> Builder<'a> {
 pub fn rustdoc_cmd(&self, host: Interned<String>) -> Command {
 let mut cmd = Command::new(&self.out.join("bootstrap/debug/rustdoc"));
 let compiler = self.compiler(self.top_stage, host);
-cmd
-.env("RUSTC_STAGE", compiler.stage.to_string())
+cmd.env("RUSTC_STAGE", compiler.stage.to_string())
 .env("RUSTC_SYSROOT", self.sysroot(compiler))
 .env("RUSTC_LIBDIR", self.sysroot_libdir(compiler, self.build.build))
 .env("CFG_RELEASE_CHANNEL", &self.build.config.channel)
-.env("RUSTDOC_REAL", self.rustdoc(host));
+.env("RUSTDOC_REAL", self.rustdoc(host))
+.env("RUSTDOC_CRATE_VERSION", self.build.rust_version());
+if let Some(linker) = self.build.linker(host) {
+cmd.env("RUSTC_TARGET_LINKER", linker);
+}
 cmd
 }
 
@@ -481,8 +484,14 @@ impl<'a> Builder<'a> {
 } else {
 PathBuf::from("/path/to/nowhere/rustdoc/not/required")
 })
-.env("TEST_MIRI", self.config.test_miri.to_string())
-.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
+.env("TEST_MIRI", self.config.test_miri.to_string());
+
+if let Some(host_linker) = self.build.linker(compiler.host) {
+cargo.env("RUSTC_HOST_LINKER", host_linker);
+}
+if let Some(target_linker) = self.build.linker(target) {
+cargo.env("RUSTC_TARGET_LINKER", target_linker);
+}
 
 if mode != Mode::Tool {
 // Tools don't get debuginfo right now, e.g. cargo and rls don't
@@ -556,17 +565,35 @@ impl<'a> Builder<'a> {
 
 cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));
 
-// Specify some various options for build scripts used throughout
-// the build.
+// Throughout the build Cargo can execute a number of build scripts
+// compiling C/C++ code and we need to pass compilers, archivers, flags, etc
+// obtained previously to those build scripts.
+// Build scripts use either the `cc` crate or `configure/make` so we pass
+// the options through environment variables that are fetched and understood by both.
 //
 // FIXME: the guard against msvc shouldn't need to be here
 if !target.contains("msvc") {
-cargo.env(format!("CC_{}", target), self.cc(target))
-.env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
-.env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
+let cc = self.cc(target);
+cargo.env(format!("CC_{}", target), cc)
+.env("CC", cc);
+
+let cflags = self.cflags(target).join(" ");
+cargo.env(format!("CFLAGS_{}", target), cflags.clone())
+.env("CFLAGS", cflags.clone());
+
+if let Some(ar) = self.ar(target) {
+let ranlib = format!("{} s", ar.display());
+cargo.env(format!("AR_{}", target), ar)
+.env("AR", ar)
+.env(format!("RANLIB_{}", target), ranlib.clone())
+.env("RANLIB", ranlib);
+}
 
 if let Ok(cxx) = self.cxx(target) {
-cargo.env(format!("CXX_{}", target), cxx);
+cargo.env(format!("CXX_{}", target), cxx)
+.env("CXX", cxx)
+.env(format!("CXXFLAGS_{}", target), cflags.clone())
+.env("CXXFLAGS", cflags);
 }
 }
 
@@ -574,6 +601,9 @@ impl<'a> Builder<'a> {
 cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
 }
 
+// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
+cargo.env("RUSTDOC_CRATE_VERSION", self.build.rust_version());
+
 // Environment variables *required* throughout the build
 //
 // FIXME: should update code to not require this env var
@@ -31,20 +31,51 @@
 //! ever be probed for. Instead the compilers found here will be used for
 //! everything.
 
+use std::collections::HashSet;
+use std::{env, iter};
+use std::path::{Path, PathBuf};
 use std::process::Command;
-use std::iter;
 
-use build_helper::{cc2ar, output};
+use build_helper::output;
 use cc;
 
 use Build;
 use config::Target;
 use cache::Interned;
 
+// The `cc` crate doesn't provide a way to obtain a path to the detected archiver,
+// so use some simplified logic here. First we respect the environment variable `AR`, then
+// try to infer the archiver path from the C compiler path.
+// In the future this logic should be replaced by calling into the `cc` crate.
+fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
+if let Some(ar) = env::var_os("AR") {
+Some(PathBuf::from(ar))
+} else if target.contains("msvc") {
+None
+} else if target.contains("musl") {
+Some(PathBuf::from("ar"))
+} else if target.contains("openbsd") {
+Some(PathBuf::from("ar"))
+} else {
+let parent = cc.parent().unwrap();
+let file = cc.file_name().unwrap().to_str().unwrap();
+for suffix in &["gcc", "cc", "clang"] {
+if let Some(idx) = file.rfind(suffix) {
+let mut file = file[..idx].to_owned();
+file.push_str("ar");
+return Some(parent.join(&file));
+}
+}
+Some(parent.join(file))
+}
+}
+
 pub fn find(build: &mut Build) {
 // For all targets we're going to need a C compiler for building some shims
 // and such as well as for being a linker for Rust code.
-for target in build.targets.iter().chain(&build.hosts).cloned().chain(iter::once(build.build)) {
+let targets = build.targets.iter().chain(&build.hosts).cloned().chain(iter::once(build.build))
+.collect::<HashSet<_>>();
+for target in targets.into_iter() {
 let mut cfg = cc::Build::new();
 cfg.cargo_metadata(false).opt_level(0).warnings(false).debug(false)
 .target(&target).host(&build.build);
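A minimal standalone sketch of the suffix-replacement fallback used by the new `cc2ar` helper above, with hypothetical paths (an illustration only, not code from this commit):

```rust
use std::path::{Path, PathBuf};

// Mirrors the fallback branch of cc2ar: swap a trailing "gcc"/"cc"/"clang"
// in the C compiler's file name for "ar", keeping the same directory.
fn infer_ar(cc: &Path) -> Option<PathBuf> {
    let parent = cc.parent()?;
    let file = cc.file_name()?.to_str()?;
    for suffix in &["gcc", "cc", "clang"] {
        if let Some(idx) = file.rfind(suffix) {
            let mut ar = file[..idx].to_owned();
            ar.push_str("ar");
            return Some(parent.join(ar));
        }
    }
    Some(parent.join(file))
}

fn main() {
    // Hypothetical cross toolchain: the archiver sits next to the compiler.
    assert_eq!(
        infer_ar(Path::new("/usr/bin/aarch64-linux-gnu-gcc")),
        Some(PathBuf::from("/usr/bin/aarch64-linux-gnu-ar"))
    );
}
```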
@@ -57,16 +88,23 @@ pub fn find(build: &mut Build) {
 }
 
 let compiler = cfg.get_compiler();
-let ar = cc2ar(compiler.path(), &target);
+let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) {
+ar
+} else {
+cc2ar(compiler.path(), &target)
+};
+
 build.verbose(&format!("CC_{} = {:?}", &target, compiler.path()));
-if let Some(ref ar) = ar {
+build.cc.insert(target, compiler);
+if let Some(ar) = ar {
 build.verbose(&format!("AR_{} = {:?}", &target, ar));
+build.ar.insert(target, ar);
 }
-build.cc.insert(target, (compiler, ar));
 }
 
 // For all host triples we need to find a C++ compiler as well
-for host in build.hosts.iter().cloned().chain(iter::once(build.build)) {
+let hosts = build.hosts.iter().cloned().chain(iter::once(build.build)).collect::<HashSet<_>>();
+for host in hosts.into_iter() {
 let mut cfg = cc::Build::new();
 cfg.cargo_metadata(false).opt_level(0).warnings(false).debug(false).cpp(true)
 .target(&host).host(&build.build);
@@ -246,8 +246,11 @@ impl Step for Rls {
 let compiler = builder.compiler(stage, host);
 
 builder.ensure(tool::Rls { compiler, target: self.host });
-let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
-cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml"));
+let mut cargo = tool::prepare_tool_cargo(builder,
+compiler,
+host,
+"test",
+"src/tools/rls");
 
 // Don't build tests dynamically, just a pain to work with
 cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
@@ -291,8 +294,11 @@ impl Step for Rustfmt {
 let compiler = builder.compiler(stage, host);
 
 builder.ensure(tool::Rustfmt { compiler, target: self.host });
-let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
-cargo.arg("--manifest-path").arg(build.src.join("src/tools/rustfmt/Cargo.toml"));
+let mut cargo = tool::prepare_tool_cargo(builder,
+compiler,
+host,
+"test",
+"src/tools/rustfmt");
 
 // Don't build tests dynamically, just a pain to work with
 cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
@@ -358,6 +364,7 @@ impl Step for Miri {
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub struct Clippy {
+stage: u32,
 host: Interned<String>,
 }
 
@@ -372,6 +379,7 @@ impl Step for Clippy {
 
 fn make_run(run: RunConfig) {
 run.builder.ensure(Clippy {
+stage: run.builder.top_stage,
 host: run.target,
 });
 }
@@ -379,10 +387,11 @@ impl Step for Clippy {
 /// Runs `cargo test` for clippy.
 fn run(self, builder: &Builder) {
 let build = builder.build;
+let stage = self.stage;
 let host = self.host;
-let compiler = builder.compiler(1, host);
+let compiler = builder.compiler(stage, host);
 
-let _clippy = builder.ensure(tool::Clippy { compiler, target: self.host });
+let clippy = builder.ensure(tool::Clippy { compiler, target: self.host });
 let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
 cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
 
@@ -390,6 +399,8 @@ impl Step for Clippy {
 cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
 // clippy tests need to know about the stage sysroot
 cargo.env("SYSROOT", builder.sysroot(compiler));
+// clippy tests need to find the driver
+cargo.env("CLIPPY_DRIVER_PATH", clippy);
 
 builder.add_rustc_lib_path(compiler, &mut cargo);
 
@@ -736,12 +747,14 @@ impl Step for Compiletest {
 flags.push("-g".to_string());
 }
 
-let mut hostflags = build.rustc_flags(compiler.host);
-hostflags.extend(flags.clone());
+if let Some(linker) = build.linker(target) {
+cmd.arg("--linker").arg(linker);
+}
+
+let hostflags = flags.clone();
 cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
 
-let mut targetflags = build.rustc_flags(target);
-targetflags.extend(flags);
+let mut targetflags = flags.clone();
 targetflags.push(format!("-Lnative={}",
 build.test_helpers_out(target).display()));
 cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
@@ -795,6 +808,9 @@ impl Step for Compiletest {
 .arg("--cflags").arg(build.cflags(target).join(" "))
 .arg("--llvm-components").arg(llvm_components.trim())
 .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+if let Some(ar) = build.ar(target) {
+cmd.arg("--ar").arg(ar);
+}
 }
 }
 if suite == "run-make" && !build.config.llvm_enabled {
@@ -820,7 +836,7 @@ impl Step for Compiletest {
 // Note that if we encounter `PATH` we make sure to append to our own `PATH`
 // rather than stomp over it.
 if target.contains("msvc") {
-for &(ref k, ref v) in build.cc[&target].0.env() {
+for &(ref k, ref v) in build.cc[&target].env() {
 if k != "PATH" {
 cmd.env(k, v);
 }
@@ -560,9 +560,6 @@ pub fn rustc_cargo(build: &Build,
 if let Some(ref s) = build.config.rustc_default_linker {
 cargo.env("CFG_DEFAULT_LINKER", s);
 }
-if let Some(ref s) = build.config.rustc_default_ar {
-cargo.env("CFG_DEFAULT_AR", s);
-}
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -88,7 +88,6 @@ pub struct Config {
 pub rust_debuginfo_only_std: bool,
 pub rust_rpath: bool,
 pub rustc_default_linker: Option<String>,
-pub rustc_default_ar: Option<String>,
 pub rust_optimize_tests: bool,
 pub rust_debuginfo_tests: bool,
 pub rust_dist_src: bool,
@@ -144,6 +143,8 @@ pub struct Target {
 pub jemalloc: Option<PathBuf>,
 pub cc: Option<PathBuf>,
 pub cxx: Option<PathBuf>,
+pub ar: Option<PathBuf>,
+pub linker: Option<PathBuf>,
 pub ndk: Option<PathBuf>,
 pub crt_static: Option<bool>,
 pub musl_root: Option<PathBuf>,
@@ -262,7 +263,6 @@ struct Rust {
 use_jemalloc: Option<bool>,
 backtrace: Option<bool>,
 default_linker: Option<String>,
-default_ar: Option<String>,
 channel: Option<String>,
 musl_root: Option<String>,
 rpath: Option<bool>,
@@ -284,6 +284,8 @@ struct TomlTarget {
 jemalloc: Option<String>,
 cc: Option<String>,
 cxx: Option<String>,
+ar: Option<String>,
+linker: Option<String>,
 android_ndk: Option<String>,
 crt_static: Option<bool>,
 musl_root: Option<String>,
@@ -464,7 +466,6 @@ impl Config {
 set(&mut config.quiet_tests, rust.quiet_tests);
 set(&mut config.test_miri, rust.test_miri);
 config.rustc_default_linker = rust.default_linker.clone();
-config.rustc_default_ar = rust.default_ar.clone();
 config.musl_root = rust.musl_root.clone().map(PathBuf::from);
 
 match rust.codegen_units {
@@ -487,8 +488,10 @@ impl Config {
 if let Some(ref s) = cfg.android_ndk {
 target.ndk = Some(env::current_dir().unwrap().join(s));
 }
-target.cxx = cfg.cxx.clone().map(PathBuf::from);
 target.cc = cfg.cc.clone().map(PathBuf::from);
+target.cxx = cfg.cxx.clone().map(PathBuf::from);
+target.ar = cfg.ar.clone().map(PathBuf::from);
+target.linker = cfg.linker.clone().map(PathBuf::from);
 target.crt_static = cfg.crt_static.clone();
 target.musl_root = cfg.musl_root.clone().map(PathBuf::from);
 target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);
@ -20,6 +20,7 @@ rust_dir = os.path.dirname(rust_dir)
|
||||||
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
|
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
|
||||||
import bootstrap
|
import bootstrap
|
||||||
|
|
||||||
|
|
||||||
class Option(object):
|
class Option(object):
|
||||||
def __init__(self, name, rustbuild, desc, value):
|
def __init__(self, name, rustbuild, desc, value):
|
||||||
self.name = name
|
self.name = name
|
||||||
|
@ -27,14 +28,18 @@ class Option(object):
|
||||||
self.desc = desc
|
self.desc = desc
|
||||||
self.value = value
|
self.value = value
|
||||||
|
|
||||||
|
|
||||||
options = []
|
options = []
|
||||||
|
|
||||||
|
|
||||||
def o(*args):
|
def o(*args):
|
||||||
options.append(Option(*args, value=False))
|
options.append(Option(*args, value=False))
|
||||||
|
|
||||||
|
|
||||||
def v(*args):
|
def v(*args):
|
||||||
options.append(Option(*args, value=True))
|
options.append(Option(*args, value=True))
|
||||||
|
|
||||||
|
|
||||||
o("debug", "rust.debug", "debug mode; disables optimization unless `--enable-optimize` given")
|
o("debug", "rust.debug", "debug mode; disables optimization unless `--enable-optimize` given")
|
||||||
o("docs", "build.docs", "build standard library documentation")
|
o("docs", "build.docs", "build standard library documentation")
|
||||||
o("compiler-docs", "build.compiler-docs", "build compiler documentation")
|
o("compiler-docs", "build.compiler-docs", "build compiler documentation")
|
||||||
|
@ -120,9 +125,8 @@ v("experimental-targets", "llvm.experimental-targets",
|
||||||
"experimental LLVM targets to build")
|
"experimental LLVM targets to build")
|
||||||
v("release-channel", "rust.channel", "the name of the release channel to build")
|
v("release-channel", "rust.channel", "the name of the release channel to build")
|
||||||
|
|
||||||
# Used on systems where "cc" and "ar" are unavailable
|
# Used on systems where "cc" is unavailable
|
||||||
v("default-linker", "rust.default-linker", "the default linker")
|
v("default-linker", "rust.default-linker", "the default linker")
|
||||||
v("default-ar", "rust.default-ar", "the default ar")
|
|
||||||
|
|
||||||
# Many of these are saved below during the "writing configuration" step
|
# Many of these are saved below during the "writing configuration" step
|
||||||
# (others are conditionally saved).
|
# (others are conditionally saved).
|
||||||
|
@ -137,13 +141,16 @@ v("target", None, "GNUs ./configure syntax LLVM target triples")
|
||||||
|
|
||||||
v("set", None, "set arbitrary key/value pairs in TOML configuration")
|
v("set", None, "set arbitrary key/value pairs in TOML configuration")
|
||||||
|
|
||||||
|
|
||||||
def p(msg):
|
def p(msg):
|
||||||
print("configure: " + msg)
|
print("configure: " + msg)
|
||||||
|
|
||||||
|
|
||||||
def err(msg):
|
def err(msg):
|
||||||
print("configure: error: " + msg)
|
print("configure: error: " + msg)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
if '--help' in sys.argv or '-h' in sys.argv:
|
if '--help' in sys.argv or '-h' in sys.argv:
|
||||||
print('Usage: ./configure [options]')
|
print('Usage: ./configure [options]')
|
||||||
print('')
|
print('')
|
||||||
|
@ -209,7 +216,7 @@ while i < len(sys.argv):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
found = True
|
found = True
|
||||||
if not option.name in known_args:
|
if option.name not in known_args:
|
||||||
known_args[option.name] = []
|
known_args[option.name] = []
|
||||||
known_args[option.name].append((option, value))
|
known_args[option.name].append((option, value))
|
||||||
break
|
break
|
||||||
|
@ -228,11 +235,13 @@ if 'option-checking' not in known_args or known_args['option-checking'][1]:
|
||||||
# TOML we're going to write out
|
# TOML we're going to write out
|
||||||
config = {}
|
config = {}
|
||||||
|
|
||||||
|
|
||||||
def build():
|
def build():
|
||||||
if 'build' in known_args:
|
if 'build' in known_args:
|
||||||
return known_args['build'][0][1]
|
return known_args['build'][0][1]
|
||||||
return bootstrap.default_build_triple()
|
return bootstrap.default_build_triple()
|
||||||
|
|
||||||
|
|
||||||
def set(key, value):
|
def set(key, value):
|
||||||
s = "{:20} := {}".format(key, value)
|
s = "{:20} := {}".format(key, value)
|
||||||
if len(s) < 70:
|
if len(s) < 70:
|
||||||
|
@ -246,10 +255,11 @@ def set(key, value):
|
||||||
if i == len(parts) - 1:
|
if i == len(parts) - 1:
|
||||||
arr[part] = value
|
arr[part] = value
|
||||||
else:
|
else:
|
||||||
if not part in arr:
|
if part not in arr:
|
||||||
arr[part] = {}
|
arr[part] = {}
|
||||||
arr = arr[part]
|
arr = arr[part]
|
||||||
|
|
||||||
|
|
||||||
for key in known_args:
|
for key in known_args:
|
||||||
# The `set` option is special and can be passed a bunch of times
|
# The `set` option is special and can be passed a bunch of times
|
||||||
if key == 'set':
|
if key == 'set':
|
||||||
@@ -346,8 +356,9 @@ for target in configured_targets:
     targets[target] = sections['target'][:]
     targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", target)


 # Here we walk through the constructed configuration we have from the parsed
-# command line arguemnts. We then apply each piece of configuration by
+# command line arguments. We then apply each piece of configuration by
 # basically just doing a `sed` to change the various configuration line to what
 # we've got configure.
 def to_toml(value):
@@ -361,7 +372,8 @@ def to_toml(value):
     elif isinstance(value, str):
         return "'" + value + "'"
     else:
-        raise 'no toml'
+        raise RuntimeError('no toml')


 def configure_section(lines, config):
     for key in config:
@@ -376,10 +388,11 @@ def configure_section(lines, config):
         if not found:
             raise RuntimeError("failed to find config line for {}".format(key))


 for section_key in config:
     section_config = config[section_key]
-    if not section_key in sections:
-        raise RuntimeError("config key {} not in sections".format(key))
+    if section_key not in sections:
+        raise RuntimeError("config key {} not in sections".format(section_key))

     if section_key == 'target':
         for target in section_config:
@@ -408,11 +421,6 @@ with open('Makefile', 'w') as f:
     contents = contents.replace("$(CFG_PYTHON)", sys.executable)
     f.write(contents)

-# Finally, clean up with a bit of a help message
-relpath = os.path.dirname(__file__)
-if relpath == '':
-    relpath = '.'
-
 p("")
-p("run `python {}/x.py --help`".format(relpath))
+p("run `python {}/x.py --help`".format(rust_dir))
 p("")
@@ -176,7 +176,7 @@ fn make_win_dist(
         }
     }

-    let target_tools = ["gcc.exe", "ld.exe", "ar.exe", "dlltool.exe", "libwinpthread-1.dll"];
+    let target_tools = ["gcc.exe", "ld.exe", "dlltool.exe", "libwinpthread-1.dll"];
     let mut rustc_dlls = vec!["libstdc++-6.dll", "libwinpthread-1.dll"];
     if target_triple.starts_with("i686-") {
         rustc_dlls.push("libgcc_s_dw2-1.dll");
@@ -1035,7 +1035,7 @@ pub struct Rls {
 }

 impl Step for Rls {
-    type Output = PathBuf;
+    type Output = Option<PathBuf>;
     const ONLY_BUILD_TARGETS: bool = true;
     const ONLY_HOSTS: bool = true;

@@ -1050,12 +1050,17 @@ impl Step for Rls {
         });
     }

-    fn run(self, builder: &Builder) -> PathBuf {
+    fn run(self, builder: &Builder) -> Option<PathBuf> {
         let build = builder.build;
         let stage = self.stage;
         let target = self.target;
         assert!(build.config.extended);

+        if !builder.config.toolstate.rls.testing() {
+            println!("skipping Dist RLS stage{} ({})", stage, target);
+            return None
+        }
+
         println!("Dist RLS stage{} ({})", stage, target);
         let src = build.src.join("src/tools/rls");
         let release_num = build.release_num("rls");
@@ -1102,7 +1107,7 @@ impl Step for Rls {
             .arg("--component-name=rls-preview");

         build.run(&mut cmd);
-        distdir(build).join(format!("{}-{}.tar.gz", name, target))
+        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
     }
 }

@@ -1202,8 +1207,12 @@ impl Step for Extended {
         // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
         // the std files during uninstall. To do this ensure that rustc comes
         // before rust-std in the list below.
-        let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
-                                analysis_installer, std_installer];
+        let mut tarballs = Vec::new();
+        tarballs.push(rustc_installer);
+        tarballs.push(cargo_installer);
+        tarballs.extend(rls_installer.clone());
+        tarballs.push(analysis_installer);
+        tarballs.push(std_installer);
         if build.config.docs {
             tarballs.push(docs_installer);
         }
|
@ -1245,35 +1254,38 @@ impl Step for Extended {
|
||||||
}
|
}
|
||||||
rtf.push_str("}");
|
rtf.push_str("}");
|
||||||
|
|
||||||
|
fn filter(contents: &str, marker: &str) -> String {
|
||||||
|
let start = format!("tool-{}-start", marker);
|
||||||
|
let end = format!("tool-{}-end", marker);
|
||||||
|
let mut lines = Vec::new();
|
||||||
|
let mut omitted = false;
|
||||||
|
for line in contents.lines() {
|
||||||
|
if line.contains(&start) {
|
||||||
|
omitted = true;
|
||||||
|
} else if line.contains(&end) {
|
||||||
|
omitted = false;
|
||||||
|
} else if !omitted {
|
||||||
|
lines.push(line);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
let xform = |p: &Path| {
|
||||||
|
let mut contents = String::new();
|
||||||
|
t!(t!(File::open(p)).read_to_string(&mut contents));
|
||||||
|
if rls_installer.is_none() {
|
||||||
|
contents = filter(&contents, "rls");
|
||||||
|
}
|
||||||
|
let ret = tmp.join(p.file_name().unwrap());
|
||||||
|
t!(t!(File::create(&ret)).write_all(contents.as_bytes()));
|
||||||
|
return ret
|
||||||
|
};
|
||||||
|
|
||||||
if target.contains("apple-darwin") {
|
if target.contains("apple-darwin") {
|
||||||
let pkg = tmp.join("pkg");
|
let pkg = tmp.join("pkg");
|
||||||
let _ = fs::remove_dir_all(&pkg);
|
let _ = fs::remove_dir_all(&pkg);
|
||||||
t!(fs::create_dir_all(pkg.join("rustc")));
|
|
||||||
t!(fs::create_dir_all(pkg.join("cargo")));
|
|
||||||
t!(fs::create_dir_all(pkg.join("rust-docs")));
|
|
||||||
t!(fs::create_dir_all(pkg.join("rust-std")));
|
|
||||||
t!(fs::create_dir_all(pkg.join("rls")));
|
|
||||||
t!(fs::create_dir_all(pkg.join("rust-analysis")));
|
|
||||||
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
|
|
||||||
&pkg.join("rustc"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)),
|
|
||||||
&pkg.join("cargo"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
|
|
||||||
&pkg.join("rust-docs"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
|
|
||||||
&pkg.join("rust-std"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)),
|
|
||||||
&pkg.join("rls"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)),
|
|
||||||
&pkg.join("rust-analysis"));
|
|
||||||
|
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
|
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
|
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
|
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);
|
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("rls"), 0o755);
|
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("rust-analysis"), 0o755);
|
|
||||||
|
|
||||||
let pkgbuild = |component: &str| {
|
let pkgbuild = |component: &str| {
|
||||||
let mut cmd = Command::new("pkgbuild");
|
let mut cmd = Command::new("pkgbuild");
|
||||||
|
@ -1283,12 +1295,23 @@ impl Step for Extended {
|
||||||
.arg(pkg.join(component).with_extension("pkg"));
|
.arg(pkg.join(component).with_extension("pkg"));
|
||||||
build.run(&mut cmd);
|
build.run(&mut cmd);
|
||||||
};
|
};
|
||||||
pkgbuild("rustc");
|
|
||||||
pkgbuild("cargo");
|
let prepare = |name: &str| {
|
||||||
pkgbuild("rust-docs");
|
t!(fs::create_dir_all(pkg.join(name)));
|
||||||
pkgbuild("rust-std");
|
cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target)),
|
||||||
pkgbuild("rls");
|
&pkg.join(name));
|
||||||
pkgbuild("rust-analysis");
|
install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
|
||||||
|
pkgbuild(name);
|
||||||
|
};
|
||||||
|
prepare("rustc");
|
||||||
|
prepare("cargo");
|
||||||
|
prepare("rust-docs");
|
||||||
|
prepare("rust-std");
|
||||||
|
prepare("rust-analysis");
|
||||||
|
|
||||||
|
if rls_installer.is_some() {
|
||||||
|
prepare("rls");
|
||||||
|
}
|
||||||
|
|
||||||
// create an 'uninstall' package
|
// create an 'uninstall' package
|
||||||
install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
|
install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
|
||||||
|
@ -1298,7 +1321,7 @@ impl Step for Extended {
|
||||||
t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
|
t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
|
||||||
install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
|
install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
|
||||||
let mut cmd = Command::new("productbuild");
|
let mut cmd = Command::new("productbuild");
|
||||||
cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
|
cmd.arg("--distribution").arg(xform(&etc.join("pkg/Distribution.xml")))
|
||||||
.arg("--resources").arg(pkg.join("res"))
|
.arg("--resources").arg(pkg.join("res"))
|
||||||
.arg(distdir(build).join(format!("{}-{}.pkg",
|
.arg(distdir(build).join(format!("{}-{}.pkg",
|
||||||
pkgname(build, "rust"),
|
pkgname(build, "rust"),
|
||||||
|
@ -1310,46 +1333,34 @@ impl Step for Extended {
|
||||||
if target.contains("windows") {
|
if target.contains("windows") {
|
||||||
let exe = tmp.join("exe");
|
let exe = tmp.join("exe");
|
||||||
let _ = fs::remove_dir_all(&exe);
|
let _ = fs::remove_dir_all(&exe);
|
||||||
t!(fs::create_dir_all(exe.join("rustc")));
|
|
||||||
t!(fs::create_dir_all(exe.join("cargo")));
|
|
||||||
t!(fs::create_dir_all(exe.join("rls")));
|
|
||||||
t!(fs::create_dir_all(exe.join("rust-analysis")));
|
|
||||||
t!(fs::create_dir_all(exe.join("rust-docs")));
|
|
||||||
t!(fs::create_dir_all(exe.join("rust-std")));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
|
|
||||||
.join("rustc"),
|
|
||||||
&exe.join("rustc"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target))
|
|
||||||
.join("cargo"),
|
|
||||||
&exe.join("cargo"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
|
|
||||||
.join("rust-docs"),
|
|
||||||
&exe.join("rust-docs"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
|
|
||||||
.join(format!("rust-std-{}", target)),
|
|
||||||
&exe.join("rust-std"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)).join("rls-preview"),
|
|
||||||
&exe.join("rls"));
|
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target))
|
|
||||||
.join(format!("rust-analysis-{}", target)),
|
|
||||||
&exe.join("rust-analysis"));
|
|
||||||
|
|
||||||
t!(fs::remove_file(exe.join("rustc/manifest.in")));
|
|
||||||
t!(fs::remove_file(exe.join("cargo/manifest.in")));
|
|
||||||
t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
|
|
||||||
t!(fs::remove_file(exe.join("rust-std/manifest.in")));
|
|
||||||
t!(fs::remove_file(exe.join("rls/manifest.in")));
|
|
||||||
t!(fs::remove_file(exe.join("rust-analysis/manifest.in")));
|
|
||||||
|
|
||||||
|
let prepare = |name: &str| {
|
||||||
|
t!(fs::create_dir_all(exe.join(name)));
|
||||||
|
let dir = if name == "rust-std" || name == "rust-analysis" {
|
||||||
|
format!("{}-{}", name, target)
|
||||||
|
} else if name == "rls" {
|
||||||
|
"rls-preview".to_string()
|
||||||
|
} else {
|
||||||
|
name.to_string()
|
||||||
|
};
|
||||||
|
cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target))
|
||||||
|
.join(dir),
|
||||||
|
&exe.join(name));
|
||||||
|
t!(fs::remove_file(exe.join(name).join("manifest.in")));
|
||||||
|
};
|
||||||
|
prepare("rustc");
|
||||||
|
prepare("cargo");
|
||||||
|
prepare("rust-analysis");
|
||||||
|
prepare("rust-docs");
|
||||||
|
prepare("rust-std");
|
||||||
|
if rls_installer.is_some() {
|
||||||
|
prepare("rls");
|
||||||
|
}
|
||||||
if target.contains("windows-gnu") {
|
if target.contains("windows-gnu") {
|
||||||
t!(fs::create_dir_all(exe.join("rust-mingw")));
|
prepare("rust-mingw");
|
||||||
cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
|
|
||||||
.join("rust-mingw"),
|
|
||||||
&exe.join("rust-mingw"));
|
|
||||||
t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
install(&etc.join("exe/rust.iss"), &exe, 0o644);
|
install(&xform(&etc.join("exe/rust.iss")), &exe, 0o644);
|
||||||
install(&etc.join("exe/modpath.iss"), &exe, 0o644);
|
install(&etc.join("exe/modpath.iss"), &exe, 0o644);
|
||||||
install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
|
install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
|
||||||
install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
|
install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
|
||||||
|
@ -1413,6 +1424,7 @@ impl Step for Extended {
|
||||||
.arg("-dr").arg("Std")
|
.arg("-dr").arg("Std")
|
||||||
.arg("-var").arg("var.StdDir")
|
.arg("-var").arg("var.StdDir")
|
||||||
.arg("-out").arg(exe.join("StdGroup.wxs")));
|
.arg("-out").arg(exe.join("StdGroup.wxs")));
|
||||||
|
if rls_installer.is_some() {
|
||||||
build.run(Command::new(&heat)
|
build.run(Command::new(&heat)
|
||||||
.current_dir(&exe)
|
.current_dir(&exe)
|
||||||
.arg("dir")
|
.arg("dir")
|
||||||
|
@ -1423,6 +1435,7 @@ impl Step for Extended {
|
||||||
.arg("-var").arg("var.RlsDir")
|
.arg("-var").arg("var.RlsDir")
|
||||||
.arg("-out").arg(exe.join("RlsGroup.wxs"))
|
.arg("-out").arg(exe.join("RlsGroup.wxs"))
|
||||||
.arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
|
.arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
|
||||||
|
}
|
||||||
build.run(Command::new(&heat)
|
build.run(Command::new(&heat)
|
||||||
.current_dir(&exe)
|
.current_dir(&exe)
|
||||||
.arg("dir")
|
.arg("dir")
|
||||||
|
@ -1456,26 +1469,30 @@ impl Step for Extended {
|
||||||
.arg("-dDocsDir=rust-docs")
|
.arg("-dDocsDir=rust-docs")
|
||||||
.arg("-dCargoDir=cargo")
|
.arg("-dCargoDir=cargo")
|
||||||
.arg("-dStdDir=rust-std")
|
.arg("-dStdDir=rust-std")
|
||||||
.arg("-dRlsDir=rls")
|
|
||||||
.arg("-dAnalysisDir=rust-analysis")
|
.arg("-dAnalysisDir=rust-analysis")
|
||||||
.arg("-arch").arg(&arch)
|
.arg("-arch").arg(&arch)
|
||||||
.arg("-out").arg(&output)
|
.arg("-out").arg(&output)
|
||||||
.arg(&input);
|
.arg(&input);
|
||||||
add_env(build, &mut cmd, target);
|
add_env(build, &mut cmd, target);
|
||||||
|
|
||||||
|
if rls_installer.is_some() {
|
||||||
|
cmd.arg("-dRlsDir=rls");
|
||||||
|
}
|
||||||
if target.contains("windows-gnu") {
|
if target.contains("windows-gnu") {
|
||||||
cmd.arg("-dGccDir=rust-mingw");
|
cmd.arg("-dGccDir=rust-mingw");
|
||||||
}
|
}
|
||||||
build.run(&mut cmd);
|
build.run(&mut cmd);
|
||||||
};
|
};
|
||||||
candle(&etc.join("msi/rust.wxs"));
|
candle(&xform(&etc.join("msi/rust.wxs")));
|
||||||
candle(&etc.join("msi/ui.wxs"));
|
candle(&etc.join("msi/ui.wxs"));
|
||||||
candle(&etc.join("msi/rustwelcomedlg.wxs"));
|
candle(&etc.join("msi/rustwelcomedlg.wxs"));
|
||||||
candle("RustcGroup.wxs".as_ref());
|
candle("RustcGroup.wxs".as_ref());
|
||||||
candle("DocsGroup.wxs".as_ref());
|
candle("DocsGroup.wxs".as_ref());
|
||||||
candle("CargoGroup.wxs".as_ref());
|
candle("CargoGroup.wxs".as_ref());
|
||||||
candle("StdGroup.wxs".as_ref());
|
candle("StdGroup.wxs".as_ref());
|
||||||
|
if rls_installer.is_some() {
|
||||||
candle("RlsGroup.wxs".as_ref());
|
candle("RlsGroup.wxs".as_ref());
|
||||||
|
}
|
||||||
candle("AnalysisGroup.wxs".as_ref());
|
candle("AnalysisGroup.wxs".as_ref());
|
||||||
|
|
||||||
if target.contains("windows-gnu") {
|
if target.contains("windows-gnu") {
|
||||||
|
@ -1499,10 +1516,13 @@ impl Step for Extended {
|
||||||
.arg("DocsGroup.wixobj")
|
.arg("DocsGroup.wixobj")
|
||||||
.arg("CargoGroup.wixobj")
|
.arg("CargoGroup.wixobj")
|
||||||
.arg("StdGroup.wixobj")
|
.arg("StdGroup.wixobj")
|
||||||
.arg("RlsGroup.wixobj")
|
|
||||||
.arg("AnalysisGroup.wixobj")
|
.arg("AnalysisGroup.wixobj")
|
||||||
.current_dir(&exe);
|
.current_dir(&exe);
|
||||||
|
|
||||||
|
if rls_installer.is_some() {
|
||||||
|
cmd.arg("RlsGroup.wixobj");
|
||||||
|
}
|
||||||
|
|
||||||
if target.contains("windows-gnu") {
|
if target.contains("windows-gnu") {
|
||||||
cmd.arg("GccGroup.wixobj");
|
cmd.arg("GccGroup.wixobj");
|
||||||
}
|
}
|
||||||
|
@@ -240,10 +240,11 @@ pub struct Build {
     lldb_python_dir: Option<String>,

     // Runtime state filled in later on
-    // target -> (cc, ar)
+    // C/C++ compilers and archiver for all targets
-    cc: HashMap<Interned<String>, (cc::Tool, Option<PathBuf>)>,
+    cc: HashMap<Interned<String>, cc::Tool>,
-    // host -> (cc, ar)
     cxx: HashMap<Interned<String>, cc::Tool>,
+    ar: HashMap<Interned<String>, PathBuf>,
+    // Misc
     crates: HashMap<Interned<String>, Crate>,
     is_sudo: bool,
     ci_env: CiEnv,
@@ -324,6 +325,7 @@ impl Build {
             rls_info,
             cc: HashMap::new(),
             cxx: HashMap::new(),
+            ar: HashMap::new(),
             crates: HashMap::new(),
             lldb_version: None,
             lldb_python_dir: None,
@@ -612,7 +614,7 @@ impl Build {

     /// Returns the path to the C compiler for the target specified.
     fn cc(&self, target: Interned<String>) -> &Path {
-        self.cc[&target].0.path()
+        self.cc[&target].path()
     }

     /// Returns a list of flags to pass to the C compiler for the target
@@ -620,7 +622,7 @@ impl Build {
     fn cflags(&self, target: Interned<String>) -> Vec<String> {
         // Filter out -O and /O (the optimization flags) that we picked up from
         // cc-rs because the build scripts will determine that for themselves.
-        let mut base = self.cc[&target].0.args().iter()
+        let mut base = self.cc[&target].args().iter()
                            .map(|s| s.to_string_lossy().into_owned())
                            .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
                            .collect::<Vec<_>>();
@@ -644,7 +646,7 @@ impl Build {

     /// Returns the path to the `ar` archive utility for the target specified.
     fn ar(&self, target: Interned<String>) -> Option<&Path> {
-        self.cc[&target].1.as_ref().map(|p| &**p)
+        self.ar.get(&target).map(|p| &**p)
     }

     /// Returns the path to the C++ compiler for the target specified.
@@ -657,21 +659,17 @@ impl Build {
         }
     }

-    /// Returns flags to pass to the compiler to generate code for `target`.
-    fn rustc_flags(&self, target: Interned<String>) -> Vec<String> {
-        // New flags should be added here with great caution!
-        //
-        // It's quite unfortunate to **require** flags to generate code for a
-        // target, so it should only be passed here if absolutely necessary!
-        // Most default configuration should be done through target specs rather
-        // than an entry here.
-
-        let mut base = Vec::new();
-        if target != self.config.build && !target.contains("msvc") &&
-            !target.contains("emscripten") {
-            base.push(format!("-Clinker={}", self.cc(target).display()));
+    /// Returns the path to the linker for the given target if it needs to be overriden.
+    fn linker(&self, target: Interned<String>) -> Option<&Path> {
+        if let Some(linker) = self.config.target_config.get(&target)
+                                  .and_then(|c| c.linker.as_ref()) {
+            Some(linker)
+        } else if target != self.config.build &&
+                  !target.contains("msvc") && !target.contains("emscripten") {
+            Some(self.cc(target))
+        } else {
+            None
         }
-        base
     }

     /// Returns if this target should statically link the C runtime, if specified
@@ -227,6 +227,13 @@ impl Step for Llvm {
         cfg.build_arg("-j").build_arg(build.jobs().to_string());
         cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
         cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
+        if let Some(ar) = build.ar(target) {
+            if ar.is_absolute() {
+                // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
+                // tries to resolve this path in the LLVM build directory.
+                cfg.define("CMAKE_AR", sanitize_cc(ar));
+            }
+        }
     };

     configure_compilers(&mut cfg);
@@ -352,21 +359,22 @@ impl Step for Openssl {
         // originally from https://www.openssl.org/source/...
         let url = format!("https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/{}",
                           name);
-        let mut ok = false;
+        let mut last_error = None;
         for _ in 0..3 {
             let status = Command::new("curl")
                             .arg("-o").arg(&tmp)
+                            .arg("-f") // make curl fail if the URL does not return HTTP 200
                             .arg(&url)
                             .status()
                             .expect("failed to spawn curl");
-            if status.success() {
-                ok = true;
-                break
-            }
-        }
-        if !ok {
-            panic!("failed to download openssl source")
+            // Retry if download failed.
+            if !status.success() {
+                last_error = Some(status.to_string());
+                continue;
             }

+            // Ensure the hash is correct.
             let mut shasum = if target.contains("apple") || build.build.contains("netbsd") {
                 let mut cmd = Command::new("shasum");
                 cmd.arg("-a").arg("256");
@@ -376,10 +384,26 @@ impl Step for Openssl {
             };
             let output = output(&mut shasum.arg(&tmp));
             let found = output.split_whitespace().next().unwrap();
+
+            // If the hash is wrong, probably the download is incomplete or S3 served an error
+            // page. In any case, retry.
             if found != OPENSSL_SHA256 {
-                panic!("downloaded openssl sha256 different\n\
-                        expected: {}\n\
-                        found: {}\n", OPENSSL_SHA256, found);
+                last_error = Some(format!(
+                    "downloaded openssl sha256 different\n\
+                     expected: {}\n\
+                     found: {}\n",
+                    OPENSSL_SHA256,
+                    found
+                ));
+                continue;
+            }
+
+            // Everything is fine, so exit the retry loop.
+            last_error = None;
+            break;
+        }
+        if let Some(error) = last_error {
+            panic!("failed to download openssl source: {}", error);
         }
         t!(fs::rename(&tmp, &tarball));
     }
@@ -387,7 +387,7 @@ pub struct Clippy {

 impl Step for Clippy {
     type Output = PathBuf;
-    const DEFAULT: bool = false;
+    const DEFAULT: bool = true;
     const ONLY_HOSTS: bool = true;

     fn should_run(run: ShouldRun) -> ShouldRun {
@@ -411,7 +411,7 @@ impl Step for Clippy {
         builder.ensure(ToolBuild {
             compiler: self.compiler,
             target: self.target,
-            tool: "clippy",
+            tool: "clippy-driver",
             mode: Mode::Librustc,
             path: "src/tools/clippy",
             expectation: builder.build.config.toolstate.clippy.passes(ToolState::Compiling),
@@ -561,7 +561,7 @@ impl<'a> Builder<'a> {
         if compiler.host.contains("msvc") {
             let curpaths = env::var_os("PATH").unwrap_or_default();
             let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
-            for &(ref k, ref v) in self.cc[&compiler.host].0.env() {
+            for &(ref k, ref v) in self.cc[&compiler.host].env() {
                 if k != "PATH" {
                     continue
                 }
@@ -31,6 +31,13 @@ impl ToolState {
             BuildExpectation::Failing
         }
     }

+    pub fn testing(&self) -> bool {
+        match *self {
+            ToolState::Testing => true,
+            _ => false,
+        }
+    }
 }

 impl Default for ToolState {
@@ -138,27 +138,6 @@ pub fn gnu_target(target: &str) -> String {
     }
 }

-pub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
-    if target.contains("msvc") {
-        None
-    } else if target.contains("musl") {
-        Some(PathBuf::from("ar"))
-    } else if target.contains("openbsd") {
-        Some(PathBuf::from("ar"))
-    } else {
-        let parent = cc.parent().unwrap();
-        let file = cc.file_name().unwrap().to_str().unwrap();
-        for suffix in &["gcc", "cc", "clang"] {
-            if let Some(idx) = file.rfind(suffix) {
-                let mut file = file[..idx].to_owned();
-                file.push_str("ar");
-                return Some(parent.join(&file));
-            }
-        }
-        Some(parent.join(file))
-    }
-}
-
 pub fn make(host: &str) -> PathBuf {
     if host.contains("bitrig") || host.contains("dragonfly") ||
         host.contains("freebsd") || host.contains("netbsd") ||
@@ -5,6 +5,7 @@ RUN sh /scripts/cross-apt-packages.sh

 RUN apt-get build-dep -y clang llvm && apt-get install -y --no-install-recommends \
   build-essential \
+  gcc-multilib \
   libedit-dev \
   libgmp-dev \
   libisl-dev \
@@ -36,17 +37,18 @@ ENV \
     AR_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-ar \
     CC_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang \
     CXX_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang++ \
-    AR_sparcv9_sun_solaris=sparcv9-sun-solaris2.11-ar \
-    CC_sparcv9_sun_solaris=sparcv9-sun-solaris2.11-gcc \
-    CXX_sparcv9_sun_solaris=sparcv9-sun-solaris2.11-g++ \
-    AR_x86_64_sun_solaris=x86_64-sun-solaris2.11-ar \
-    CC_x86_64_sun_solaris=x86_64-sun-solaris2.11-gcc \
-    CXX_x86_64_sun_solaris=x86_64-sun-solaris2.11-g++
+    AR_sparcv9_sun_solaris=sparcv9-sun-solaris2.10-ar \
+    CC_sparcv9_sun_solaris=sparcv9-sun-solaris2.10-gcc \
+    CXX_sparcv9_sun_solaris=sparcv9-sun-solaris2.10-g++ \
+    AR_x86_64_sun_solaris=x86_64-sun-solaris2.10-ar \
+    CC_x86_64_sun_solaris=x86_64-sun-solaris2.10-gcc \
+    CXX_x86_64_sun_solaris=x86_64-sun-solaris2.10-g++

 ENV TARGETS=x86_64-unknown-fuchsia
 ENV TARGETS=$TARGETS,aarch64-unknown-fuchsia
 ENV TARGETS=$TARGETS,sparcv9-sun-solaris
 ENV TARGETS=$TARGETS,x86_64-sun-solaris
+ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32

 ENV RUST_CONFIGURE_ARGS --target=$TARGETS --enable-extended
 ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
@@ -25,7 +25,7 @@ cd binutils
 curl https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS.tar.xz | tar xJf -
 mkdir binutils-build
 cd binutils-build
-hide_output ../binutils-$BINUTILS/configure --target=$ARCH-sun-solaris2.11
+hide_output ../binutils-$BINUTILS/configure --target=$ARCH-sun-solaris2.10
 hide_output make -j10
 hide_output make install

@@ -58,13 +58,17 @@ for deb in *$APT_ARCH.deb; do
   dpkg -x $deb .
 done

-mkdir /usr/local/$ARCH-sun-solaris2.11/usr
-mv usr/include /usr/local/$ARCH-sun-solaris2.11/usr/include
-mv usr/lib/$LIB_ARCH/* /usr/local/$ARCH-sun-solaris2.11/lib
-mv lib/$LIB_ARCH/* /usr/local/$ARCH-sun-solaris2.11/lib
+# Strip Solaris 11 functions that are optionally used by libbacktrace.
+# This is for Solaris 10 compatibility.
+$ARCH-sun-solaris2.10-strip -N dl_iterate_phdr -N strnlen lib/$LIB_ARCH/libc.so
+
+mkdir /usr/local/$ARCH-sun-solaris2.10/usr
+mv usr/include /usr/local/$ARCH-sun-solaris2.10/usr/include
+mv usr/lib/$LIB_ARCH/* /usr/local/$ARCH-sun-solaris2.10/lib
+mv lib/$LIB_ARCH/* /usr/local/$ARCH-sun-solaris2.10/lib

-ln -s /usr/local/$ARCH-sun-solaris2.11/usr/include /usr/local/$ARCH-sun-solaris2.11/sys-include
-ln -s /usr/local/$ARCH-sun-solaris2.11/usr/include /usr/local/$ARCH-sun-solaris2.11/include
+ln -s /usr/local/$ARCH-sun-solaris2.10/usr/include /usr/local/$ARCH-sun-solaris2.10/sys-include
+ln -s /usr/local/$ARCH-sun-solaris2.10/usr/include /usr/local/$ARCH-sun-solaris2.10/include

 cd ..
 rm -rf solaris
@@ -80,7 +84,7 @@ mkdir ../gcc-build
 cd ../gcc-build
 hide_output ../gcc-$GCC/configure \
   --enable-languages=c,c++ \
-  --target=$ARCH-sun-solaris2.11 \
+  --target=$ARCH-sun-solaris2.10 \
   --with-gnu-as \
   --with-gnu-ld \
   --disable-multilib \
@@ -94,7 +98,7 @@ hide_output ../gcc-$GCC/configure \
   --disable-libsanitizer \
   --disable-libquadmath-support \
   --disable-lto \
-  --with-sysroot=/usr/local/$ARCH-sun-solaris2.11
+  --with-sysroot=/usr/local/$ARCH-sun-solaris2.10

 hide_output make -j10
 hide_output make install
@@ -31,7 +31,7 @@ download_sysimage() {
   # Keep printing yes to accept the licenses
   while true; do echo yes; sleep 10; done | \
     /android/sdk/tools/android update sdk -a --no-ui \
-      --filter "$filter"
+      --filter "$filter" --no-https
 }

 create_avd() {
@@ -152,9 +152,6 @@ never colorize output.

 .SH CODEGEN OPTIONS

-.TP
-\fBar\fR=\fI/path/to/ar\fR
-Path to the archive utility to use when assembling archives.
 .TP
 \fBlinker\fR=\fI/path/to/cc\fR
 Path to the linker utility to use when linking libraries, executables, and
@@ -16,3 +16,33 @@ The `non_ascii_idents` feature adds support for non-ASCII identifiers.
 const ε: f64 = 0.00001f64;
 const Π: f64 = 3.14f64;
 ```
+
+## Changes to the language reference
+
+> **<sup>Lexer:<sup>**
+> IDENTIFIER :
+>    XID_start XID_continue<sup>\*</sup>
+>    | `_` XID_continue<sup>+</sup>
+
+An identifier is any nonempty Unicode string of the following form:
+
+Either
+
+* The first character has property [`XID_start`]
+* The remaining characters have property [`XID_continue`]
+
+Or
+
+* The first character is `_`
+* The identifier is more than one character, `_` alone is not an identifier
+* The remaining characters have property [`XID_continue`]
+
+that does _not_ occur in the set of [strict keywords].
+
+> **Note**: [`XID_start`] and [`XID_continue`] as character properties cover the
+> character ranges used to form the more familiar C and Java language-family
+> identifiers.
+
+[`XID_start`]: http://unicode.org/cldr/utility/list-unicodeset.jsp?a=%5B%3AXID_Start%3A%5D&abb=on&g=&i=
+[`XID_continue`]: http://unicode.org/cldr/utility/list-unicodeset.jsp?a=%5B%3AXID_Continue%3A%5D&abb=on&g=&i=
+[strict keywords]: ../reference/keywords.html#strict-keywords
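A minimal sketch, not part of the commit above, showing the identifier rules from the added section in practice. It assumes a nightly toolchain of this era where the `non_ascii_idents` feature gate is still required; the variable names are arbitrary.

```rust
// Illustration of the XID_start / XID_continue rules quoted above.
// Assumes `#![feature(non_ascii_idents)]` is still needed (nightly-only at the time).
#![feature(non_ascii_idents)]

fn main() {
    let наибольший = 41;   // accepted: first char has XID_start, the rest XID_continue
    let _δ2 = 1;           // accepted: `_` followed by at least one XID_continue char
    println!("{}", наибольший + _δ2);

    // Rejected by the rules above (left commented out on purpose):
    // let 2fast = 1;      // first character lacks XID_start
    // let match = 3;      // strict keywords are never identifiers
}
```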
@@ -0,0 +1,47 @@
+# `optin_builtin_traits`
+
+The tracking issue for this feature is [#13231]
+
+[#13231]: https://github.com/rust-lang/rust/issues/13231
+
+----
+
+The `optin_builtin_traits` feature gate allows you to define auto traits.
+
+Auto traits, like [`Send`] or [`Sync`] in the standard library, are marker traits
+that are automatically implemented for every type, unless the type, or a type it contains,
+has explictly opted out via a negative impl.
+
+[`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
+[`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+
+```rust,ignore
+impl !Type for Trait
+```
+
+Example:
+
+```rust
+#![feature(optin_builtin_traits)]
+
+trait Valid {}
+
+impl Valid for .. {}
+
+struct True;
+struct False;
+
+impl !Valid for False {}
+
+struct MaybeValid<T>(T);
+
+fn must_be_valid<T: Valid>(_t: T) { }
+
+fn main() {
+    // works
+    must_be_valid( MaybeValid(True) );
+
+    // compiler error - trait bound not satisfied
+    // must_be_valid( MaybeValid(False) );
+}
+```
@@ -0,0 +1,25 @@
+# `unboxed_closures`
+
+The tracking issue for this feature is [#29625]
+
+See Also: [`fn_traits`](library-features/fn-traits.html)
+
+[#29625]: https://github.com/rust-lang/rust/issues/29625
+
+----
+
+The `unboxed_closures` feature allows you to write functions using the `"rust-call"` ABI,
+required for implmenting the [`Fn*`] family of traits. `"rust-call"` functions must have
+exactly one (non self) argument, a tuple representing the argument list.
+
+[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+
+```rust
+#![feature(unboxed_closures)]
+
+extern "rust-call" fn add_args(args: (u32, u32)) -> u32 {
+    args.0 + args.1
+}
+
+fn main() {}
+```
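The `add_args` example above is never actually invoked; a hedged usage sketch follows, not taken from the commit. It assumes the same nightly `unboxed_closures` feature gate, and that from ordinary Rust code the function is still called with its single declared tuple parameter.

```rust
#![feature(unboxed_closures)]

// Same shape as the example in the added document: one (non-self) tuple argument.
extern "rust-call" fn add_args(args: (u32, u32)) -> u32 {
    args.0 + args.1
}

fn main() {
    // Called like any other function taking one tuple parameter.
    assert_eq!(add_args((2, 3)), 5);
}
```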
src/doc/unstable-book/src/library-features/fn-traits.md (new file, 35 lines)
@@ -0,0 +1,35 @@
+# `fn_traits`
+
+The tracking issue for this feature is [#29625]
+
+See Also: [`unboxed_closures`](language-features/unboxed-closures.html)
+
+[#29625]: https://github.com/rust-lang/rust/issues/29625
+
+----
+
+The `fn_traits` feature allows for implementation of the [`Fn*`] traits
+for creating custom closure-like types.
+
+[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+
+```rust
+#![feature(unboxed_closures)]
+#![feature(fn_traits)]
+
+struct Adder {
+    a: u32
+}
+
+impl FnOnce<(u32, )> for Adder {
+    type Output = u32;
+    extern "rust-call" fn call_once(self, b: (u32, )) -> Self::Output {
+        self.a + b.0
+    }
+}
+
+fn main() {
+    let adder = Adder { a: 3 };
+    assert_eq!(adder(2), 5);
+}
+```
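The `Adder` example above only implements `FnOnce`, so it can be called exactly once, by value. A minimal sketch of the companion `FnMut` and `Fn` implementations follows; it is not part of the commit and assumes the same nightly feature gates and the unstable `Fn*` trait method signatures of the time.

```rust
#![feature(unboxed_closures)]
#![feature(fn_traits)]

struct Adder {
    a: u32,
}

impl FnOnce<(u32,)> for Adder {
    type Output = u32;
    extern "rust-call" fn call_once(self, b: (u32,)) -> u32 {
        self.a + b.0
    }
}

// FnMut and Fn reuse the same logic; they differ only in how `self` is taken.
impl FnMut<(u32,)> for Adder {
    extern "rust-call" fn call_mut(&mut self, b: (u32,)) -> u32 {
        self.a + b.0
    }
}

impl Fn<(u32,)> for Adder {
    extern "rust-call" fn call(&self, b: (u32,)) -> u32 {
        self.a + b.0
    }
}

fn main() {
    let adder = Adder { a: 3 };
    // With `Fn` implemented, the same value can be called repeatedly.
    assert_eq!(adder(2), 5);
    assert_eq!(adder(4), 7);
}
```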
@@ -248,7 +248,10 @@ class RustStringSlicePrinter(object):
     def to_string(self):
         (length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(self.__val)
         raw_ptr = data_ptr.get_wrapped_value()
-        return '"%s"' % raw_ptr.string(encoding="utf-8", length=length)
+        return raw_ptr.lazy_string(encoding="utf-8", length=length)
+
+    def display_hint(self):
+        return "string"


 class RustStdVecPrinter(object):
@@ -278,9 +281,11 @@ class RustStdStringPrinter(object):
     def to_string(self):
         vec = self.__val.get_child_at_index(0)
         (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(vec)
-        return '"%s"' % data_ptr.get_wrapped_value().string(encoding="utf-8",
+        return data_ptr.get_wrapped_value().lazy_string(encoding="utf-8",
                                                         length=length)
+
+    def display_hint(self):
+        return "string"

 class RustOsStringPrinter(object):
     def __init__(self, val):
@@ -294,8 +299,10 @@ class RustOsStringPrinter(object):

         (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(
             vec)
-        return '"%s"' % data_ptr.get_wrapped_value().string(length=length)
+        return data_ptr.get_wrapped_value().lazy_string(length=length)
+
+    def display_hint(self):
+        return "string"

 class RustCStyleVariantPrinter(object):
     def __init__(self, val):
@@ -46,7 +46,9 @@ Name: gcc; Description: "Linker and platform libraries"; Types: full
 Name: docs; Description: "HTML documentation"; Types: full
 Name: cargo; Description: "Cargo, the Rust package manager"; Types: full
 Name: std; Description: "The Rust Standard Library"; Types: full
+// tool-rls-start
 Name: rls; Description: "RLS, the Rust Language Server"
+// tool-rls-end

 [Files]
 Source: "rustc/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: rust
@@ -56,8 +58,10 @@ Source: "rust-mingw/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs;
 Source: "rust-docs/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: docs
 Source: "cargo/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: cargo
 Source: "rust-std/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: std
+// tool-rls-start
 Source: "rls/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: rls
 Source: "rust-analysis/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: rls
+// tool-rls-end

 [Code]
 const
@@ -170,8 +170,10 @@
         <Directory Id="Docs" Name="." />
         <Directory Id="Cargo" Name="." />
         <Directory Id="Std" Name="." />
+        <!-- tool-rls-start -->
         <Directory Id="Rls" Name="." />
         <Directory Id="Analysis" Name="." />
+        <!-- tool-rls-end -->
       </Directory>
     </Directory>

@@ -275,6 +277,7 @@
       <ComponentRef Id="PathEnvPerMachine" />
       <ComponentRef Id="PathEnvPerUser" />
     </Feature>
+    <!-- tool-rls-start -->
     <Feature Id="RLS"
              Title="RLS, the Rust Language Server"
              Display="7"
@@ -283,6 +286,7 @@
       <ComponentGroupRef Id="RlsGroup" />
       <ComponentGroupRef Id="AnalysisGroup" />
     </Feature>
+    <!-- tool-rls-end -->

     <UIRef Id="RustUI" />
 </Product>
@@ -16,7 +16,9 @@
         <line choice="rust-std"/>
         <line choice="cargo"/>
         <line choice="rust-docs"/>
+        <!-- tool-rls-start -->
         <line choice="rls"/>
+        <!-- tool-rls-end -->
     </line>
     <line choice="uninstall" />
 </choices-outline>
@@ -62,6 +64,7 @@
     >
         <pkg-ref id="org.rust-lang.rust-docs"/>
     </choice>
+    <!-- tool-rls-start -->
     <choice id="rls" visible="true"
         title="RLS" description="RLS, the Rust Language Server"
         selected="(!choices.uninstall.selected && choices['rls'].selected) || (choices.uninstall.selected && choices.install.selected)"
@@ -70,11 +73,14 @@
         <pkg-ref id="org.rust-lang.rls"/>
         <pkg-ref id="org.rust-lang.rust-analysis"/>
     </choice>
+    <!-- tool-rls-end -->
     <pkg-ref id="org.rust-lang.rustc" version="0" onConclusion="none">rustc.pkg</pkg-ref>
     <pkg-ref id="org.rust-lang.cargo" version="0" onConclusion="none">cargo.pkg</pkg-ref>
     <pkg-ref id="org.rust-lang.rust-docs" version="0" onConclusion="none">rust-docs.pkg</pkg-ref>
     <pkg-ref id="org.rust-lang.rust-std" version="0" onConclusion="none">rust-std.pkg</pkg-ref>
+    <!-- tool-rls-start -->
     <pkg-ref id="org.rust-lang.rls" version="0" onConclusion="none">rls.pkg</pkg-ref>
+    <!-- tool-rls-end -->
     <pkg-ref id="org.rust-lang.rust-analysis" version="0" onConclusion="none">rust-analysis.pkg</pkg-ref>
     <pkg-ref id="org.rust-lang.uninstall" version="0" onConclusion="none">uninstall.pkg</pkg-ref>
     <background file="rust-logo.png" mime-type="image/png"
@@ -81,7 +81,7 @@ def execute_command(command_interpreter, command):

     if res.Succeeded():
         if res.HasResult():
-            print(normalize_whitespace(res.GetOutput()), end='\n')
+            print(normalize_whitespace(res.GetOutput() or ''), end='\n')

         # If the command introduced any breakpoints, make sure to register
         # them with the breakpoint
|
|
|
@ -85,16 +85,23 @@ ident [a-zA-Z\x80-\xff_][a-zA-Z0-9\x80-\xff_]*
|
||||||
<blockcomment>(.|\n) { }
|
<blockcomment>(.|\n) { }
|
||||||
|
|
||||||
_ { return UNDERSCORE; }
|
_ { return UNDERSCORE; }
|
||||||
|
abstract { return ABSTRACT; }
|
||||||
|
alignof { return ALIGNOF; }
|
||||||
as { return AS; }
|
as { return AS; }
|
||||||
|
become { return BECOME; }
|
||||||
box { return BOX; }
|
box { return BOX; }
|
||||||
break { return BREAK; }
|
break { return BREAK; }
|
||||||
|
catch { return CATCH; }
|
||||||
const { return CONST; }
|
const { return CONST; }
|
||||||
continue { return CONTINUE; }
|
continue { return CONTINUE; }
|
||||||
crate { return CRATE; }
|
crate { return CRATE; }
|
||||||
|
default { return DEFAULT; }
|
||||||
|
do { return DO; }
|
||||||
else { return ELSE; }
|
else { return ELSE; }
|
||||||
enum { return ENUM; }
|
enum { return ENUM; }
|
||||||
extern { return EXTERN; }
|
extern { return EXTERN; }
|
||||||
false { return FALSE; }
|
false { return FALSE; }
|
||||||
|
final { return FINAL; }
|
||||||
fn { return FN; }
|
fn { return FN; }
|
||||||
for { return FOR; }
|
for { return FOR; }
|
||||||
if { return IF; }
|
if { return IF; }
|
||||||
|
@ -102,26 +109,36 @@ impl { return IMPL; }
|
||||||
in { return IN; }
|
in { return IN; }
|
||||||
let { return LET; }
|
let { return LET; }
|
||||||
loop { return LOOP; }
|
loop { return LOOP; }
|
||||||
|
macro { return MACRO; }
|
||||||
match { return MATCH; }
|
match { return MATCH; }
|
||||||
mod { return MOD; }
|
mod { return MOD; }
|
||||||
move { return MOVE; }
|
move { return MOVE; }
|
||||||
mut { return MUT; }
|
mut { return MUT; }
|
||||||
|
offsetof { return OFFSETOF; }
|
||||||
|
override { return OVERRIDE; }
|
||||||
priv { return PRIV; }
|
priv { return PRIV; }
|
||||||
proc { return PROC; }
|
proc { return PROC; }
|
||||||
|
pure { return PURE; }
|
||||||
pub { return PUB; }
|
pub { return PUB; }
|
||||||
ref { return REF; }
|
ref { return REF; }
|
||||||
return { return RETURN; }
|
return { return RETURN; }
|
||||||
self { return SELF; }
|
self { return SELF; }
|
||||||
|
sizeof { return SIZEOF; }
|
||||||
static { return STATIC; }
|
static { return STATIC; }
|
||||||
struct { return STRUCT; }
|
struct { return STRUCT; }
|
||||||
|
super { return SUPER; }
|
||||||
trait { return TRAIT; }
|
trait { return TRAIT; }
|
||||||
true { return TRUE; }
|
true { return TRUE; }
|
||||||
type { return TYPE; }
|
type { return TYPE; }
|
||||||
typeof { return TYPEOF; }
|
typeof { return TYPEOF; }
|
||||||
|
union { return UNION; }
|
||||||
unsafe { return UNSAFE; }
|
unsafe { return UNSAFE; }
|
||||||
|
unsized { return UNSIZED; }
|
||||||
use { return USE; }
|
use { return USE; }
|
||||||
|
virtual { return VIRTUAL; }
|
||||||
where { return WHERE; }
|
where { return WHERE; }
|
||||||
while { return WHILE; }
|
while { return WHILE; }
|
||||||
|
yield { return YIELD; }
|
||||||
|
|
||||||
{ident} { return IDENT; }
|
{ident} { return IDENT; }
|
||||||
|
|
||||||
|
@ -194,7 +211,7 @@ while { return WHILE; }
|
||||||
<ltorchar>{ident} { BEGIN(INITIAL); return LIFETIME; }
|
<ltorchar>{ident} { BEGIN(INITIAL); return LIFETIME; }
|
||||||
<ltorchar>\\[nrt\\\x27\x220]\x27 { BEGIN(suffix); return LIT_CHAR; }
|
<ltorchar>\\[nrt\\\x27\x220]\x27 { BEGIN(suffix); return LIT_CHAR; }
|
||||||
<ltorchar>\\x[0-9a-fA-F]{2}\x27 { BEGIN(suffix); return LIT_CHAR; }
|
<ltorchar>\\x[0-9a-fA-F]{2}\x27 { BEGIN(suffix); return LIT_CHAR; }
|
||||||
<ltorchar>\\u\{[0-9a-fA-F]?{6}\}\x27 { BEGIN(suffix); return LIT_CHAR; }
|
<ltorchar>\\u\{([0-9a-fA-F]_*){1,6}\}\x27 { BEGIN(suffix); return LIT_CHAR; }
|
||||||
<ltorchar>.\x27 { BEGIN(suffix); return LIT_CHAR; }
|
<ltorchar>.\x27 { BEGIN(suffix); return LIT_CHAR; }
|
||||||
<ltorchar>[\x80-\xff]{2,4}\x27 { BEGIN(suffix); return LIT_CHAR; }
|
<ltorchar>[\x80-\xff]{2,4}\x27 { BEGIN(suffix); return LIT_CHAR; }
|
||||||
<ltorchar><<EOF>> { BEGIN(INITIAL); return -1; }
|
<ltorchar><<EOF>> { BEGIN(INITIAL); return -1; }
|
||||||
|
@ -205,7 +222,7 @@ b\x22 { BEGIN(bytestr); yymore(); }
|
||||||
<bytestr><<EOF>> { return -1; }
|
<bytestr><<EOF>> { return -1; }
|
||||||
<bytestr>\\[n\nrt\\\x27\x220] { yymore(); }
|
<bytestr>\\[n\nrt\\\x27\x220] { yymore(); }
|
||||||
<bytestr>\\x[0-9a-fA-F]{2} { yymore(); }
|
<bytestr>\\x[0-9a-fA-F]{2} { yymore(); }
|
||||||
<bytestr>\\u\{[0-9a-fA-F]?{6}\} { yymore(); }
|
<bytestr>\\u\{([0-9a-fA-F]_*){1,6}\} { yymore(); }
|
||||||
<bytestr>\\[^n\nrt\\\x27\x220] { return -1; }
|
<bytestr>\\[^n\nrt\\\x27\x220] { return -1; }
|
||||||
<bytestr>(.|\n) { yymore(); }
|
<bytestr>(.|\n) { yymore(); }
|
||||||
|
|
||||||
|
@ -255,8 +272,8 @@ br/# {
|
||||||
b\x27 { BEGIN(byte); yymore(); }
|
b\x27 { BEGIN(byte); yymore(); }
|
||||||
<byte>\\[nrt\\\x27\x220]\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
<byte>\\[nrt\\\x27\x220]\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
||||||
<byte>\\x[0-9a-fA-F]{2}\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
<byte>\\x[0-9a-fA-F]{2}\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
||||||
<byte>\\u[0-9a-fA-F]{4}\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
<byte>\\u([0-9a-fA-F]_*){4}\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
||||||
<byte>\\U[0-9a-fA-F]{8}\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
<byte>\\U([0-9a-fA-F]_*){8}\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
||||||
<byte>.\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
<byte>.\x27 { BEGIN(INITIAL); return LIT_BYTE; }
|
||||||
<byte><<EOF>> { BEGIN(INITIAL); return -1; }
|
<byte><<EOF>> { BEGIN(INITIAL); return -1; }
|
||||||
|
|
||||||
|
@ -313,7 +330,7 @@ r/# {
|
||||||
<str><<EOF>> { return -1; }
|
<str><<EOF>> { return -1; }
|
||||||
<str>\\[n\nr\rt\\\x27\x220] { yymore(); }
|
<str>\\[n\nr\rt\\\x27\x220] { yymore(); }
|
||||||
<str>\\x[0-9a-fA-F]{2} { yymore(); }
|
<str>\\x[0-9a-fA-F]{2} { yymore(); }
|
||||||
<str>\\u\{[0-9a-fA-F]?{6}\} { yymore(); }
|
<str>\\u\{([0-9a-fA-F]_*){1,6}\} { yymore(); }
|
||||||
<str>\\[^n\nrt\\\x27\x220] { return -1; }
|
<str>\\[^n\nrt\\\x27\x220] { return -1; }
|
||||||
<str>(.|\n) { yymore(); }
|
<str>(.|\n) { yymore(); }
|
||||||
|
|
||||||
|
|
|
@@ -62,13 +62,19 @@ extern char *yytext;
 // keywords
 %token SELF
 %token STATIC
+%token ABSTRACT
+%token ALIGNOF
 %token AS
+%token BECOME
 %token BREAK
+%token CATCH
 %token CRATE
+%token DO
 %token ELSE
 %token ENUM
 %token EXTERN
 %token FALSE
+%token FINAL
 %token FN
 %token FOR
 %token IF
@@ -76,19 +82,29 @@ extern char *yytext;
 %token IN
 %token LET
 %token LOOP
+%token MACRO
 %token MATCH
 %token MOD
 %token MOVE
 %token MUT
+%token OFFSETOF
+%token OVERRIDE
 %token PRIV
 %token PUB
+%token PURE
 %token REF
 %token RETURN
+%token SIZEOF
 %token STRUCT
+%token SUPER
+%token UNION
+%token UNSIZED
 %token TRUE
 %token TRAIT
 %token TYPE
 %token UNSAFE
+%token VIRTUAL
+%token YIELD
 %token DEFAULT
 %token USE
 %token WHILE
@@ -141,6 +157,10 @@ extern char *yytext;
 // 'foo:bar . <' is shifted (in a trait reference occurring in a
 // bounds list), parsing as foo:(bar<baz>) rather than (foo:bar)<baz>.
 %precedence IDENT
+// Put the weak keywords that can be used as idents here as well
+%precedence CATCH
+%precedence DEFAULT
+%precedence UNION

 // A couple fake-precedence symbols to use in rules associated with +
 // and < in trailing type contexts. These come up when you have a type
@@ -161,13 +181,13 @@ extern char *yytext;
 %precedence FOR

 // Binops & unops, and their precedences
+%precedence '?'
 %precedence BOX
-%precedence BOXPLACE
 %nonassoc DOTDOT

 // RETURN needs to be lower-precedence than tokens that start
 // prefix_exprs
-%precedence RETURN
+%precedence RETURN YIELD

 %right '=' SHLEQ SHREQ MINUSEQ ANDEQ OREQ PLUSEQ STAREQ SLASHEQ CARETEQ PERCENTEQ
 %right LARROW
@@ -321,6 +341,8 @@ view_path
 | path_no_types_allowed MOD_SEP '{' idents_or_self ',' '}' { $$ = mk_node("ViewPathList", 2, $1, $4); }
 | MOD_SEP '{' idents_or_self ',' '}' { $$ = mk_node("ViewPathList", 1, $3); }
 | path_no_types_allowed MOD_SEP '*' { $$ = mk_node("ViewPathGlob", 1, $1); }
+| MOD_SEP '*' { $$ = mk_atom("ViewPathGlob"); }
+| '*' { $$ = mk_atom("ViewPathGlob"); }
 | '{' '}' { $$ = mk_atom("ViewPathListEmpty"); }
 | '{' idents_or_self '}' { $$ = mk_node("ViewPathList", 1, $2); }
 | '{' idents_or_self ',' '}' { $$ = mk_node("ViewPathList", 1, $2); }
@@ -334,6 +356,7 @@ block_item
 | item_foreign_mod { $$ = mk_node("ItemForeignMod", 1, $1); }
 | item_struct
 | item_enum
+| item_union
 | item_trait
 | item_impl
 ;
@@ -387,6 +410,7 @@ struct_decl_field
 struct_tuple_fields
 : struct_tuple_field { $$ = mk_node("StructFields", 1, $1); }
 | struct_tuple_fields ',' struct_tuple_field { $$ = ext_node($1, 1, $3); }
+| %empty { $$ = mk_none(); }
 ;

 struct_tuple_field
@@ -417,6 +441,11 @@ enum_args
 | %empty { $$ = mk_none(); }
 ;

+// unions
+item_union
+: UNION ident generic_params maybe_where_clause '{' struct_decl_fields '}' { $$ = mk_node("ItemUnion", 0); }
+| UNION ident generic_params maybe_where_clause '{' struct_decl_fields ',' '}' { $$ = mk_node("ItemUnion", 0); }
+
 item_mod
 : MOD ident ';' { $$ = mk_node("ItemMod", 1, $2); }
 | MOD ident '{' maybe_mod_items '}' { $$ = mk_node("ItemMod", 2, $2, $4); }
|
||||||
|
|
||||||
idents_or_self
|
idents_or_self
|
||||||
: ident_or_self { $$ = mk_node("IdentsOrSelf", 1, $1); }
|
: ident_or_self { $$ = mk_node("IdentsOrSelf", 1, $1); }
|
||||||
| ident_or_self AS ident { $$ = mk_node("IdentsOrSelf", 2, $1, $3); }
|
| idents_or_self AS ident { $$ = mk_node("IdentsOrSelf", 2, $1, $3); }
|
||||||
| idents_or_self ',' ident_or_self { $$ = ext_node($1, 1, $3); }
|
| idents_or_self ',' ident_or_self { $$ = ext_node($1, 1, $3); }
|
||||||
;
|
;
|
||||||
|
|
||||||
|
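The reworked `idents_or_self` rule is what lets `as` renames appear inside `use` lists. For example (the paths are chosen arbitrarily):

```rust
#![allow(unused_imports)]

// `as` renames are accepted both on a whole path and inside a brace list.
use std::collections::HashMap as Map;
use std::io::{Read as IoRead, Write as IoWrite};

fn main() {
    let mut scores: Map<&str, u32> = Map::new();
    scores.insert("alice", 3);
    assert_eq!(scores["alice"], 3);
}
```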
@@ -515,6 +544,7 @@ trait_item
 : trait_const
 | trait_type
 | trait_method
+| maybe_outer_attrs item_macro { $$ = mk_node("TraitMacroItem", 2, $1, $2); }
 ;

 trait_const
@@ -547,36 +577,48 @@ trait_method
 ;

 type_method
-: attrs_and_vis maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';'
+: maybe_outer_attrs maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';'
 {
   $$ = mk_node("TypeMethod", 6, $1, $2, $4, $5, $6, $7);
 }
-| attrs_and_vis maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';'
+| maybe_outer_attrs CONST maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';'
+{
+  $$ = mk_node("TypeMethod", 6, $1, $3, $5, $6, $7, $8);
+}
+| maybe_outer_attrs maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';'
 {
   $$ = mk_node("TypeMethod", 7, $1, $2, $4, $6, $7, $8, $9);
 }
 ;

 method
-: attrs_and_vis maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block
+: maybe_outer_attrs maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block
 {
   $$ = mk_node("Method", 7, $1, $2, $4, $5, $6, $7, $8);
 }
-| attrs_and_vis maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block
+| maybe_outer_attrs CONST maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block
+{
+  $$ = mk_node("Method", 7, $1, $3, $5, $6, $7, $8, $9);
+}
+| maybe_outer_attrs maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block
 {
   $$ = mk_node("Method", 8, $1, $2, $4, $6, $7, $8, $9, $10);
 }
 ;

 impl_method
-: attrs_and_vis maybe_unsafe FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block
+: attrs_and_vis maybe_default maybe_unsafe FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block
 {
-  $$ = mk_node("Method", 7, $1, $2, $4, $5, $6, $7, $8);
+  $$ = mk_node("Method", 8, $1, $2, $3, $5, $6, $7, $8, $9);
 }
-| attrs_and_vis maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block
+| attrs_and_vis maybe_default CONST maybe_unsafe FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block
 {
   $$ = mk_node("Method", 8, $1, $2, $4, $6, $7, $8, $9, $10);
 }
+| attrs_and_vis maybe_default maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block
+{
+  $$ = mk_node("Method", 9, $1, $2, $3, $5, $7, $8, $9, $10, $11);
+}
 ;

 // There are two forms of impl:
@@ -638,12 +680,17 @@ impl_item
 | impl_type
 ;

+maybe_default
+: DEFAULT { $$ = mk_atom("Default"); }
+| %empty { $$ = mk_none(); }
+;
+
 impl_const
-: attrs_and_vis item_const { $$ = mk_node("ImplConst", 1, $1, $2); }
+: attrs_and_vis maybe_default item_const { $$ = mk_node("ImplConst", 3, $1, $2, $3); }
 ;

 impl_type
-: attrs_and_vis TYPE ident generic_params '=' ty_sum ';' { $$ = mk_node("ImplType", 4, $1, $3, $4, $6); }
+: attrs_and_vis maybe_default TYPE ident generic_params '=' ty_sum ';' { $$ = mk_node("ImplType", 5, $1, $2, $4, $5, $7); }
 ;

 item_fn
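The `maybe_default` / `DEFAULT` productions exist for specialization's `default` impl items, which are still nightly-only behind `#![feature(specialization)]`. A sketch of the kind of code this parses (the `Greet`/`Robot` names are invented, and the feature gate is an assumption about what's needed to compile it):

```rust
// Sketch only: `default` items require the unstable specialization feature.
#![feature(specialization)]

trait Greet {
    fn greet(&self) -> String;
}

impl<T> Greet for T {
    // A `default` method may be overridden by a more specific impl.
    default fn greet(&self) -> String {
        "hello".to_string()
    }
}

struct Robot;

impl Greet for Robot {
    fn greet(&self) -> String {
        "beep boop".to_string()
    }
}

fn main() {
    let n = 42;
    assert_eq!(n.greet(), "hello");
    assert_eq!(Robot.greet(), "beep boop");
}
```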
@@ -651,6 +698,10 @@ item_fn
 {
   $$ = mk_node("ItemFn", 5, $2, $3, $4, $5, $6);
 }
+| CONST FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block
+{
+  $$ = mk_node("ItemFn", 5, $3, $4, $5, $6, $7);
+}
 ;

 item_unsafe_fn
@@ -658,6 +709,10 @@ item_unsafe_fn
 {
   $$ = mk_node("ItemUnsafeFn", 5, $3, $4, $5, $6, $7);
 }
+| CONST UNSAFE FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block
+{
+  $$ = mk_node("ItemUnsafeFn", 5, $4, $5, $6, $7, $8);
+}
 | UNSAFE EXTERN maybe_abi FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block
 {
   $$ = mk_node("ItemUnsafeFn", 6, $3, $5, $6, $7, $8, $9);
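The `CONST FN` alternatives correspond to `const fn` items, which can be evaluated at compile time (stable since Rust 1.31 for simple bodies like the one below):

```rust
const fn square(n: usize) -> usize {
    n * n
}

const AREA: usize = square(4);

fn main() {
    // A const fn can also be used where a constant is required,
    // e.g. an array length.
    let buf = [0u8; square(3)];
    assert_eq!(AREA, 16);
    assert_eq!(buf.len(), 9);
}
```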
@@ -723,12 +778,6 @@ inferrable_param
 : pat maybe_ty_ascription { $$ = mk_node("InferrableParam", 2, $1, $2); }
 ;

-maybe_unboxed_closure_kind
-: %empty
-| ':'
-| '&' maybe_mut ':'
-;
-
 maybe_comma_params
 : ',' { $$ = mk_none(); }
 | ',' params { $$ = $2; }
|
||||||
;
|
;
|
||||||
|
|
||||||
generic_params
|
generic_params
|
||||||
: '<' lifetimes '>' { $$ = mk_node("Generics", 2, $2, mk_none()); }
|
: '<' '>' { $$ = mk_node("Generics", 2, mk_none(), mk_none()); }
|
||||||
|
| '<' lifetimes '>' { $$ = mk_node("Generics", 2, $2, mk_none()); }
|
||||||
| '<' lifetimes ',' '>' { $$ = mk_node("Generics", 2, $2, mk_none()); }
|
| '<' lifetimes ',' '>' { $$ = mk_node("Generics", 2, $2, mk_none()); }
|
||||||
| '<' lifetimes SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, mk_none()); }
|
| '<' lifetimes SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, mk_none()); }
|
||||||
| '<' lifetimes ',' SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, mk_none()); }
|
| '<' lifetimes ',' SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, mk_none()); }
|
||||||
|
@ -837,6 +887,8 @@ path_no_types_allowed
|
||||||
| MOD_SEP ident { $$ = mk_node("ViewPath", 1, $2); }
|
| MOD_SEP ident { $$ = mk_node("ViewPath", 1, $2); }
|
||||||
| SELF { $$ = mk_node("ViewPath", 1, mk_atom("Self")); }
|
| SELF { $$ = mk_node("ViewPath", 1, mk_atom("Self")); }
|
||||||
| MOD_SEP SELF { $$ = mk_node("ViewPath", 1, mk_atom("Self")); }
|
| MOD_SEP SELF { $$ = mk_node("ViewPath", 1, mk_atom("Self")); }
|
||||||
|
| SUPER { $$ = mk_node("ViewPath", 1, mk_atom("Super")); }
|
||||||
|
| MOD_SEP SUPER { $$ = mk_node("ViewPath", 1, mk_atom("Super")); }
|
||||||
| path_no_types_allowed MOD_SEP ident { $$ = ext_node($1, 1, $3); }
|
| path_no_types_allowed MOD_SEP ident { $$ = ext_node($1, 1, $3); }
|
||||||
;
|
;
|
||||||
|
|
||||||
|
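The added `SUPER` alternatives allow `super` (and, per the grammar, a leading `::`) in `use` paths, referring to the parent module. For example (module names are made up):

```rust
mod outer {
    pub fn helper() -> u32 { 7 }

    pub mod inner {
        // `super::` refers to the parent module.
        use super::helper;

        pub fn double() -> u32 {
            helper() * 2
        }
    }
}

fn main() {
    assert_eq!(outer::inner::double(), 14);
}
```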
@@ -882,7 +934,7 @@ generic_args
 ;

 generic_values
-: maybe_lifetimes maybe_ty_sums_and_or_bindings { $$ = mk_node("GenericValues", 2, $1, $2); }
+: maybe_ty_sums_and_or_bindings { $$ = mk_node("GenericValues", 1, $1); }
 ;

 maybe_ty_sums_and_or_bindings
@@ -910,12 +962,11 @@ pat
 | ANDAND pat { $$ = mk_node("PatRegion", 1, mk_node("PatRegion", 1, $2)); }
 | '(' ')' { $$ = mk_atom("PatUnit"); }
 | '(' pat_tup ')' { $$ = mk_node("PatTup", 1, $2); }
-| '(' pat_tup ',' ')' { $$ = mk_node("PatTup", 1, $2); }
 | '[' pat_vec ']' { $$ = mk_node("PatVec", 1, $2); }
 | lit_or_path
 | lit_or_path DOTDOTDOT lit_or_path { $$ = mk_node("PatRange", 2, $1, $3); }
 | path_expr '{' pat_struct '}' { $$ = mk_node("PatStruct", 2, $1, $3); }
-| path_expr '(' DOTDOT ')' { $$ = mk_node("PatEnum", 1, $1); }
+| path_expr '(' ')' { $$ = mk_node("PatEnum", 2, $1, mk_none()); }
 | path_expr '(' pat_tup ')' { $$ = mk_node("PatEnum", 2, $1, $3); }
 | path_expr '!' maybe_ident delimited_token_trees { $$ = mk_node("PatMac", 3, $1, $3, $4); }
 | binding_mode ident { $$ = mk_node("PatIdent", 2, $1, $2); }
@@ -953,6 +1004,7 @@ pat_field
 | BOX binding_mode ident { $$ = mk_node("PatField", 3, mk_atom("box"), $2, $3); }
 | ident ':' pat { $$ = mk_node("PatField", 2, $1, $3); }
 | binding_mode ident ':' pat { $$ = mk_node("PatField", 3, $1, $2, $4); }
+| LIT_INTEGER ':' pat { $$ = mk_node("PatField", 2, mk_atom(yytext), $3); }
 ;

 pat_fields
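The `LIT_INTEGER ':' pat` field alternative is the struct-pattern form for tuple structs, where the field "name" is its index. A sketch (assuming this numeric-field pattern syntax, which current rustc accepts):

```rust
struct Pair(u32, u32);

fn main() {
    let p = Pair(1, 2);
    // Tuple structs can be matched with struct-pattern syntax, using the
    // field index as the name.
    let Pair { 0: first, 1: second } = p;
    assert_eq!((first, second), (1, 2));
}
```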
@@ -965,11 +1017,26 @@ pat_struct
 | pat_fields ',' { $$ = mk_node("PatStruct", 2, $1, mk_atom("false")); }
 | pat_fields ',' DOTDOT { $$ = mk_node("PatStruct", 2, $1, mk_atom("true")); }
 | DOTDOT { $$ = mk_node("PatStruct", 1, mk_atom("true")); }
+| %empty { $$ = mk_node("PatStruct", 1, mk_none()); }
 ;

 pat_tup
-: pat { $$ = mk_node("pat_tup", 1, $1); }
-| pat_tup ',' pat { $$ = ext_node($1, 1, $3); }
+: pat_tup_elts { $$ = mk_node("PatTup", 2, $1, mk_none()); }
+| pat_tup_elts ',' { $$ = mk_node("PatTup", 2, $1, mk_none()); }
+| pat_tup_elts DOTDOT { $$ = mk_node("PatTup", 2, $1, mk_none()); }
+| pat_tup_elts ',' DOTDOT { $$ = mk_node("PatTup", 2, $1, mk_none()); }
+| pat_tup_elts DOTDOT ',' pat_tup_elts { $$ = mk_node("PatTup", 2, $1, $4); }
+| pat_tup_elts DOTDOT ',' pat_tup_elts ',' { $$ = mk_node("PatTup", 2, $1, $4); }
+| pat_tup_elts ',' DOTDOT ',' pat_tup_elts { $$ = mk_node("PatTup", 2, $1, $5); }
+| pat_tup_elts ',' DOTDOT ',' pat_tup_elts ',' { $$ = mk_node("PatTup", 2, $1, $5); }
+| DOTDOT ',' pat_tup_elts { $$ = mk_node("PatTup", 2, mk_none(), $3); }
+| DOTDOT ',' pat_tup_elts ',' { $$ = mk_node("PatTup", 2, mk_none(), $3); }
+| DOTDOT { $$ = mk_node("PatTup", 2, mk_none(), mk_none()); }
+;
+
+pat_tup_elts
+: pat { $$ = mk_node("PatTupElts", 1, $1); }
+| pat_tup_elts ',' pat { $$ = ext_node($1, 1, $3); }
 ;

 pat_vec
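The `DOTDOT` alternatives added to `pat_tup` are the `..` "rest" pattern inside tuple (and tuple-struct) patterns. For example:

```rust
fn main() {
    let tuple = (1, 2, 3, 4, 5);
    // `..` may appear once in a tuple pattern and skips the middle elements.
    let (first, .., last) = tuple;
    assert_eq!((first, last), (1, 5));

    let point = (0, 7, 0);
    let (_, y, ..) = point;
    assert_eq!(y, 7);
}
```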
@@ -1010,6 +1077,8 @@ ty_prim
 : %prec IDENT path_generic_args_without_colons { $$ = mk_node("TyPath", 2, mk_node("global", 1, mk_atom("false")), $1); }
 | %prec IDENT MOD_SEP path_generic_args_without_colons { $$ = mk_node("TyPath", 2, mk_node("global", 1, mk_atom("true")), $2); }
 | %prec IDENT SELF MOD_SEP path_generic_args_without_colons { $$ = mk_node("TyPath", 2, mk_node("self", 1, mk_atom("true")), $3); }
+| %prec IDENT path_generic_args_without_colons '!' maybe_ident delimited_token_trees { $$ = mk_node("TyMacro", 3, $1, $3, $4); }
+| %prec IDENT MOD_SEP path_generic_args_without_colons '!' maybe_ident delimited_token_trees { $$ = mk_node("TyMacro", 3, $2, $4, $5); }
 | BOX ty { $$ = mk_node("TyBox", 1, $2); }
 | '*' maybe_mut_or_const ty { $$ = mk_node("TyPtr", 2, $2, $3); }
 | '&' ty { $$ = mk_node("TyRptr", 2, mk_atom("MutImmutable"), $2); }
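The `TyMacro` alternatives let a macro invocation stand in type position. A sketch with a made-up `same_as!` macro:

```rust
// A macro that simply expands to the type it is given.
macro_rules! same_as {
    ($t:ty) => { $t };
}

fn main() {
    let x: same_as!(u64) = 10;
    let v: Vec<same_as!(String)> = vec!["hi".to_string()];
    assert_eq!(x + v.len() as u64, 11);
}
```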
@@ -1024,7 +1093,6 @@ ty_prim
 | TYPEOF '(' expr ')' { $$ = mk_node("TyTypeof", 1, $3); }
 | UNDERSCORE { $$ = mk_atom("TyInfer"); }
 | ty_bare_fn
-| ty_proc
 | for_in_type
 ;

@@ -1046,17 +1114,12 @@ ty_closure
 | OROR maybe_bounds ret_ty { $$ = mk_node("TyClosure", 2, $2, $3); }
 ;

-ty_proc
-: PROC generic_params fn_params maybe_bounds ret_ty { $$ = mk_node("TyProc", 4, $2, $3, $4, $5); }
-;
-
 for_in_type
 : FOR '<' maybe_lifetimes '>' for_in_type_suffix { $$ = mk_node("ForInType", 2, $3, $5); }
 ;

 for_in_type_suffix
-: ty_proc
-| ty_bare_fn
+: ty_bare_fn
 | trait_ref
 | ty_closure
 ;
@@ -1100,13 +1163,23 @@ ty_sums
 ;

 ty_sum
-: ty { $$ = mk_node("TySum", 1, $1); }
-| ty '+' ty_param_bounds { $$ = mk_node("TySum", 2, $1, $3); }
+: ty_sum_elt { $$ = mk_node("TySum", 1, $1); }
+| ty_sum '+' ty_sum_elt { $$ = ext_node($1, 1, $3); }
+;
+
+ty_sum_elt
+: ty
+| lifetime
 ;

 ty_prim_sum
-: ty_prim { $$ = mk_node("TySum", 1, $1); }
-| ty_prim '+' ty_param_bounds { $$ = mk_node("TySum", 2, $1, $3); }
+: ty_prim_sum_elt { $$ = mk_node("TySum", 1, $1); }
+| ty_prim_sum '+' ty_prim_sum_elt { $$ = ext_node($1, 1, $3); }
+;
+
+ty_prim_sum_elt
+: ty_prim
+| lifetime
 ;

 maybe_ty_param_bounds
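`ty_sum` now treats a lifetime as just another `+` element, which is how trait-object types such as `Display + 'a` (today written `dyn Display + 'a`) are formed. For example:

```rust
use std::fmt::Display;

// A type "sum": a trait plus a lifetime bound.
fn boxed_label<'a>(value: &'a str) -> Box<dyn Display + 'a> {
    Box::new(value)
}

fn main() {
    let s = String::from("hello");
    let label = boxed_label(&s);
    assert_eq!(label.to_string(), "hello");
}
```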
@@ -1127,6 +1200,7 @@ boundseq
 polybound
 : FOR '<' maybe_lifetimes '>' bound { $$ = mk_node("PolyBound", 2, $3, $5); }
 | bound
+| '?' FOR '<' maybe_lifetimes '>' bound { $$ = mk_node("PolyBound", 2, $4, $6); }
 | '?' bound { $$ = $2; }
 ;

@@ -1244,11 +1318,6 @@ maybe_stmts
 // block, nonblock-prefix, and nonblock-nonprefix.
 //
 // In non-stmts contexts, expr can relax this trichotomy.
-//
-// There is also one other expr subtype: nonparen_expr disallows exprs
-// surrounded by parens (including tuple expressions), this is
-// necessary for BOX (place) expressions, so a parens expr following
-// the BOX is always parsed as the place.

 stmts
 : stmt { $$ = mk_node("stmts", 1, $1); }
@@ -1256,14 +1325,15 @@ stmts
 ;

 stmt
-: let
+: maybe_outer_attrs let { $$ = $2; }
 | stmt_item
 | PUB stmt_item { $$ = $2; }
 | outer_attrs stmt_item { $$ = $2; }
 | outer_attrs PUB stmt_item { $$ = $3; }
 | full_block_expr
-| block
+| maybe_outer_attrs block { $$ = $2; }
 | nonblock_expr ';'
+| outer_attrs nonblock_expr ';' { $$ = $2; }
 | ';' { $$ = mk_none(); }
 ;

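`stmt` now takes optional outer attributes, i.e. attributes attached directly to statements such as `let`. For example:

```rust
fn main() {
    // An outer attribute attached directly to a statement.
    #[allow(unused_variables)]
    let answer = 42;

    let mut total = 0;
    total += 1;
    assert_eq!(total, 1);
}
```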
@@ -1296,7 +1366,9 @@ path_expr
 // expressions.
 path_generic_args_with_colons
 : ident { $$ = mk_node("components", 1, $1); }
+| SUPER { $$ = mk_atom("Super"); }
 | path_generic_args_with_colons MOD_SEP ident { $$ = ext_node($1, 1, $3); }
+| path_generic_args_with_colons MOD_SEP SUPER { $$ = ext_node($1, 1, mk_atom("Super")); }
 | path_generic_args_with_colons MOD_SEP generic_args { $$ = ext_node($1, 1, $3); }
 ;

@@ -1313,6 +1385,7 @@ nonblock_expr
 | SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); }
 | macro_expr { $$ = mk_node("ExprMac", 1, $1); }
 | path_expr '{' struct_expr_fields '}' { $$ = mk_node("ExprStruct", 2, $1, $3); }
+| nonblock_expr '?' { $$ = mk_node("ExprTry", 1, $1); }
 | nonblock_expr '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); }
 | nonblock_expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
 | nonblock_expr '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); }
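The postfix `'?'` alternative is the error-propagation operator (the `ExprTry` node): on `Err` it returns early from the enclosing function. For example:

```rust
use std::num::ParseIntError;

fn double(input: &str) -> Result<i32, ParseIntError> {
    // `?` unwraps the Ok value or returns the Err to the caller.
    let n: i32 = input.trim().parse()?;
    Ok(n * 2)
}

fn main() {
    assert_eq!(double(" 21 "), Ok(42));
    assert!(double("not a number").is_err());
}
```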
@@ -1325,6 +1398,8 @@ nonblock_expr
 | RETURN expr { $$ = mk_node("ExprRet", 1, $2); }
 | BREAK { $$ = mk_node("ExprBreak", 0); }
 | BREAK lifetime { $$ = mk_node("ExprBreak", 1, $2); }
+| YIELD { $$ = mk_node("ExprYield", 0); }
+| YIELD expr { $$ = mk_node("ExprYield", 1, $2); }
 | nonblock_expr LARROW expr { $$ = mk_node("ExprInPlace", 2, $1, $3); }
 | nonblock_expr '=' expr { $$ = mk_node("ExprAssign", 2, $1, $3); }
 | nonblock_expr SHLEQ expr { $$ = mk_node("ExprAssignShl", 2, $1, $3); }
@@ -1360,8 +1435,8 @@ nonblock_expr
 | DOTDOT expr { $$ = mk_node("ExprRange", 2, mk_none(), $2); }
 | DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); }
 | nonblock_expr AS ty { $$ = mk_node("ExprCast", 2, $1, $3); }
-| BOX nonparen_expr { $$ = mk_node("ExprBox", 1, $2); }
+| nonblock_expr ':' ty { $$ = mk_node("ExprTypeAscr", 2, $1, $3); }
-| %prec BOXPLACE BOX '(' maybe_expr ')' nonblock_expr { $$ = mk_node("ExprBox", 2, $3, $5); }
+| BOX expr { $$ = mk_node("ExprBox", 1, $2); }
 | expr_qualified_path
 | nonblock_prefix_expr
 ;
@@ -1373,6 +1448,7 @@ expr
 | SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); }
 | macro_expr { $$ = mk_node("ExprMac", 1, $1); }
 | path_expr '{' struct_expr_fields '}' { $$ = mk_node("ExprStruct", 2, $1, $3); }
+| expr '?' { $$ = mk_node("ExprTry", 1, $1); }
 | expr '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); }
 | expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
 | expr '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); }
@@ -1385,6 +1461,8 @@ expr
 | RETURN expr { $$ = mk_node("ExprRet", 1, $2); }
 | BREAK { $$ = mk_node("ExprBreak", 0); }
 | BREAK ident { $$ = mk_node("ExprBreak", 1, $2); }
+| YIELD { $$ = mk_node("ExprYield", 0); }
+| YIELD expr { $$ = mk_node("ExprYield", 1, $2); }
 | expr LARROW expr { $$ = mk_node("ExprInPlace", 2, $1, $3); }
 | expr '=' expr { $$ = mk_node("ExprAssign", 2, $1, $3); }
 | expr SHLEQ expr { $$ = mk_node("ExprAssignShl", 2, $1, $3); }
@@ -1420,69 +1498,8 @@ expr
 | DOTDOT expr { $$ = mk_node("ExprRange", 2, mk_none(), $2); }
 | DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); }
 | expr AS ty { $$ = mk_node("ExprCast", 2, $1, $3); }
-| BOX nonparen_expr { $$ = mk_node("ExprBox", 1, $2); }
-| %prec BOXPLACE BOX '(' maybe_expr ')' expr { $$ = mk_node("ExprBox", 2, $3, $5); }
+| expr ':' ty { $$ = mk_node("ExprTypeAscr", 2, $1, $3); }
+| BOX expr { $$ = mk_node("ExprBox", 1, $2); }
-| expr_qualified_path
-| block_expr
-| block
-| nonblock_prefix_expr
-;
-
-nonparen_expr
-: lit { $$ = mk_node("ExprLit", 1, $1); }
-| %prec IDENT
-  path_expr { $$ = mk_node("ExprPath", 1, $1); }
-| SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); }
-| macro_expr { $$ = mk_node("ExprMac", 1, $1); }
-| path_expr '{' struct_expr_fields '}' { $$ = mk_node("ExprStruct", 2, $1, $3); }
-| nonparen_expr '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); }
-| nonparen_expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
-| nonparen_expr '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); }
-| nonparen_expr '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 2, $1, $3); }
-| '[' vec_expr ']' { $$ = mk_node("ExprVec", 1, $2); }
-| CONTINUE { $$ = mk_node("ExprAgain", 0); }
-| CONTINUE ident { $$ = mk_node("ExprAgain", 1, $2); }
-| RETURN { $$ = mk_node("ExprRet", 0); }
-| RETURN expr { $$ = mk_node("ExprRet", 1, $2); }
-| BREAK { $$ = mk_node("ExprBreak", 0); }
-| BREAK ident { $$ = mk_node("ExprBreak", 1, $2); }
-| nonparen_expr LARROW nonparen_expr { $$ = mk_node("ExprInPlace", 2, $1, $3); }
-| nonparen_expr '=' nonparen_expr { $$ = mk_node("ExprAssign", 2, $1, $3); }
-| nonparen_expr SHLEQ nonparen_expr { $$ = mk_node("ExprAssignShl", 2, $1, $3); }
-| nonparen_expr SHREQ nonparen_expr { $$ = mk_node("ExprAssignShr", 2, $1, $3); }
-| nonparen_expr MINUSEQ nonparen_expr { $$ = mk_node("ExprAssignSub", 2, $1, $3); }
-| nonparen_expr ANDEQ nonparen_expr { $$ = mk_node("ExprAssignBitAnd", 2, $1, $3); }
-| nonparen_expr OREQ nonparen_expr { $$ = mk_node("ExprAssignBitOr", 2, $1, $3); }
-| nonparen_expr PLUSEQ nonparen_expr { $$ = mk_node("ExprAssignAdd", 2, $1, $3); }
-| nonparen_expr STAREQ nonparen_expr { $$ = mk_node("ExprAssignMul", 2, $1, $3); }
-| nonparen_expr SLASHEQ nonparen_expr { $$ = mk_node("ExprAssignDiv", 2, $1, $3); }
-| nonparen_expr CARETEQ nonparen_expr { $$ = mk_node("ExprAssignBitXor", 2, $1, $3); }
-| nonparen_expr PERCENTEQ nonparen_expr { $$ = mk_node("ExprAssignRem", 2, $1, $3); }
-| nonparen_expr OROR nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiOr"), $1, $3); }
-| nonparen_expr ANDAND nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiAnd"), $1, $3); }
-| nonparen_expr EQEQ nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiEq"), $1, $3); }
-| nonparen_expr NE nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiNe"), $1, $3); }
-| nonparen_expr '<' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiLt"), $1, $3); }
-| nonparen_expr '>' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiGt"), $1, $3); }
-| nonparen_expr LE nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiLe"), $1, $3); }
-| nonparen_expr GE nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiGe"), $1, $3); }
-| nonparen_expr '|' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitOr"), $1, $3); }
-| nonparen_expr '^' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitXor"), $1, $3); }
-| nonparen_expr '&' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitAnd"), $1, $3); }
-| nonparen_expr SHL nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiShl"), $1, $3); }
-| nonparen_expr SHR nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiShr"), $1, $3); }
-| nonparen_expr '+' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiAdd"), $1, $3); }
-| nonparen_expr '-' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiSub"), $1, $3); }
-| nonparen_expr '*' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiMul"), $1, $3); }
-| nonparen_expr '/' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiDiv"), $1, $3); }
-| nonparen_expr '%' nonparen_expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiRem"), $1, $3); }
-| nonparen_expr DOTDOT { $$ = mk_node("ExprRange", 2, $1, mk_none()); }
-| nonparen_expr DOTDOT nonparen_expr { $$ = mk_node("ExprRange", 2, $1, $3); }
-| DOTDOT nonparen_expr { $$ = mk_node("ExprRange", 2, mk_none(), $2); }
-| DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); }
-| nonparen_expr AS ty { $$ = mk_node("ExprCast", 2, $1, $3); }
-| BOX nonparen_expr { $$ = mk_node("ExprBox", 1, $2); }
-| %prec BOXPLACE BOX '(' maybe_expr ')' expr { $$ = mk_node("ExprBox", 1, $3, $5); }
 | expr_qualified_path
 | block_expr
 | block
@@ -1495,6 +1512,7 @@ expr_nostruct
   path_expr { $$ = mk_node("ExprPath", 1, $1); }
 | SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); }
 | macro_expr { $$ = mk_node("ExprMac", 1, $1); }
+| expr_nostruct '?' { $$ = mk_node("ExprTry", 1, $1); }
 | expr_nostruct '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); }
 | expr_nostruct '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
 | expr_nostruct '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); }
@@ -1507,6 +1525,8 @@ expr_nostruct
 | RETURN expr { $$ = mk_node("ExprRet", 1, $2); }
 | BREAK { $$ = mk_node("ExprBreak", 0); }
 | BREAK ident { $$ = mk_node("ExprBreak", 1, $2); }
+| YIELD { $$ = mk_node("ExprYield", 0); }
+| YIELD expr { $$ = mk_node("ExprYield", 1, $2); }
 | expr_nostruct LARROW expr_nostruct { $$ = mk_node("ExprInPlace", 2, $1, $3); }
 | expr_nostruct '=' expr_nostruct { $$ = mk_node("ExprAssign", 2, $1, $3); }
 | expr_nostruct SHLEQ expr_nostruct { $$ = mk_node("ExprAssignShl", 2, $1, $3); }
@@ -1542,8 +1562,8 @@ expr_nostruct
 | DOTDOT expr_nostruct { $$ = mk_node("ExprRange", 2, mk_none(), $2); }
 | DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); }
 | expr_nostruct AS ty { $$ = mk_node("ExprCast", 2, $1, $3); }
-| BOX nonparen_expr { $$ = mk_node("ExprBox", 1, $2); }
+| expr_nostruct ':' ty { $$ = mk_node("ExprTypeAscr", 2, $1, $3); }
-| %prec BOXPLACE BOX '(' maybe_expr ')' expr_nostruct { $$ = mk_node("ExprBox", 1, $3, $5); }
+| BOX expr { $$ = mk_node("ExprBox", 1, $2); }
 | expr_qualified_path
 | block_expr
 | block
@@ -1558,7 +1578,6 @@ nonblock_prefix_expr_nostruct
 | ANDAND maybe_mut expr_nostruct { $$ = mk_node("ExprAddrOf", 1, mk_node("ExprAddrOf", 2, $2, $3)); }
 | lambda_expr_nostruct
 | MOVE lambda_expr_nostruct { $$ = $2; }
-| proc_expr_nostruct
 ;

 nonblock_prefix_expr
@@ -1569,7 +1588,6 @@ nonblock_prefix_expr
 | ANDAND maybe_mut expr { $$ = mk_node("ExprAddrOf", 1, mk_node("ExprAddrOf", 2, $2, $3)); }
 | lambda_expr
 | MOVE lambda_expr { $$ = $2; }
-| proc_expr
 ;

 expr_qualified_path
@@ -1608,41 +1626,40 @@ lambda_expr
 : %prec LAMBDA
   OROR ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $2, $3); }
 | %prec LAMBDA
-  '|' maybe_unboxed_closure_kind '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $4, $5); }
+  '|' '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $3, $4); }
 | %prec LAMBDA
   '|' inferrable_params '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, $2, $4, $5); }
 | %prec LAMBDA
-  '|' '&' maybe_mut ':' inferrable_params '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, $5, $7, $8); }
+  '|' inferrable_params OROR lambda_expr_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $2, mk_none(), $4); }
+;
+
+lambda_expr_no_first_bar
+: %prec LAMBDA
+  '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $2, $3); }
 | %prec LAMBDA
-  '|' ':' inferrable_params '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, $3, $5, $6); }
+  inferrable_params '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, $1, $3, $4); }
+| %prec LAMBDA
+  inferrable_params OROR lambda_expr_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $1, mk_none(), $3); }
 ;

 lambda_expr_nostruct
 : %prec LAMBDA
   OROR expr_nostruct { $$ = mk_node("ExprFnBlock", 2, mk_none(), $2); }
 | %prec LAMBDA
-  '|' maybe_unboxed_closure_kind '|' expr_nostruct { $$ = mk_node("ExprFnBlock", 2, mk_none(), $4); }
+  '|' '|' ret_ty expr_nostruct { $$ = mk_node("ExprFnBlock", 3, mk_none(), $3, $4); }
 | %prec LAMBDA
   '|' inferrable_params '|' expr_nostruct { $$ = mk_node("ExprFnBlock", 2, $2, $4); }
 | %prec LAMBDA
-  '|' '&' maybe_mut ':' inferrable_params '|' expr_nostruct { $$ = mk_node("ExprFnBlock", 2, $5, $7); }
-| %prec LAMBDA
-  '|' ':' inferrable_params '|' expr_nostruct { $$ = mk_node("ExprFnBlock", 2, $3, $5); }
-
+  '|' inferrable_params OROR lambda_expr_nostruct_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $2, mk_none(), $4); }
 ;

-proc_expr
+lambda_expr_nostruct_no_first_bar
 : %prec LAMBDA
-  PROC '(' ')' expr { $$ = mk_node("ExprProc", 2, mk_none(), $4); }
+  '|' ret_ty expr_nostruct { $$ = mk_node("ExprFnBlock", 3, mk_none(), $2, $3); }
 | %prec LAMBDA
-  PROC '(' inferrable_params ')' expr { $$ = mk_node("ExprProc", 2, $3, $5); }
+  inferrable_params '|' ret_ty expr_nostruct { $$ = mk_node("ExprFnBlock", 3, $1, $3, $4); }
-;
-
-proc_expr_nostruct
-: %prec LAMBDA
-  PROC '(' ')' expr_nostruct { $$ = mk_node("ExprProc", 2, mk_none(), $4); }
 | %prec LAMBDA
-  PROC '(' inferrable_params ')' expr_nostruct { $$ = mk_node("ExprProc", 2, $3, $5); }
+  inferrable_params OROR lambda_expr_nostruct_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $1, mk_none(), $3); }
 ;

 vec_expr
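The reworked closure productions cover the usual forms, plus the case where a closure's body is itself a closure and the adjacent bars lex as a single `||` token. A sketch:

```rust
fn main() {
    let add = |a: i32, b: i32| a + b;       // parameters between the bars
    let forty_two = || 42;                  // `||` means "no parameters"
    let double = |n: i32| -> i32 { n * 2 }; // explicit return type

    // A closure whose body is itself a closure; written without a space
    // (`|x: i32||y: i32| ...`) the two adjacent bars lex as one `||` token,
    // which is the case the `... OROR lambda_expr_no_first_bar` rules handle.
    let adder = |x: i32| move |y: i32| x + y;

    assert_eq!(add(1, 2), 3);
    assert_eq!(forty_two(), 42);
    assert_eq!(double(21), 42);
    assert_eq!(adder(40)(2), 42);
}
```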
@@ -1654,6 +1671,7 @@ struct_expr_fields
 : field_inits
 | field_inits ','
 | maybe_field_inits default_field_init { $$ = ext_node($1, 1, $2); }
+| %empty { $$ = mk_none(); }
 ;

 maybe_field_inits
@@ -1668,7 +1686,9 @@ field_inits
 ;

 field_init
-: ident ':' expr { $$ = mk_node("FieldInit", 2, $1, $3); }
+: ident { $$ = mk_node("FieldInit", 1, $1); }
+| ident ':' expr { $$ = mk_node("FieldInit", 2, $1, $3); }
+| LIT_INTEGER ':' expr { $$ = mk_node("FieldInit", 2, mk_atom(yytext), $3); }
 ;

 default_field_init
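The bare-`ident` alternative in `field_init` is struct-field init shorthand (stable since Rust 1.17): when a local variable has the same name as the field, `x` stands for `x: x`. For example:

```rust
struct Point { x: i32, y: i32 }

fn main() {
    let x = 3;
    let y = 4;
    // Field-init shorthand: `x` and `y` are sugar for `x: x` and `y: y`.
    let p = Point { x, y };
    assert_eq!((p.x, p.y), (3, 4));
}
```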
@@ -1689,10 +1709,18 @@ block_expr

 full_block_expr
 : block_expr
-| full_block_expr '.' path_generic_args_with_colons %prec IDENT { $$ = mk_node("ExprField", 2, $1, $3); }
-| full_block_expr '.' path_generic_args_with_colons '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 3, $1, $3, $5); }
-| full_block_expr '.' path_generic_args_with_colons '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 3, $1, $3, $5); }
-| full_block_expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
+| block_expr_dot
+;
+
+block_expr_dot
+: block_expr '.' path_generic_args_with_colons %prec IDENT { $$ = mk_node("ExprField", 2, $1, $3); }
+| block_expr_dot '.' path_generic_args_with_colons %prec IDENT { $$ = mk_node("ExprField", 2, $1, $3); }
+| block_expr '.' path_generic_args_with_colons '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 3, $1, $3, $5); }
+| block_expr_dot '.' path_generic_args_with_colons '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 3, $1, $3, $5); }
+| block_expr '.' path_generic_args_with_colons '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 3, $1, $3, $5); }
+| block_expr_dot '.' path_generic_args_with_colons '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 3, $1, $3, $5); }
+| block_expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
+| block_expr_dot '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); }
 ;

 expr_match
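`block_expr_dot` is what lets a field access, index, or call hang directly off a block-like expression such as `match`, at least in expression position. For example:

```rust
fn main() {
    let input = Some(3);

    // A method call chained directly onto a `match` expression.
    let doubled = match input {
        Some(n) => n,
        None => 0,
    }.wrapping_mul(2);

    assert_eq!(doubled, 6);
}
```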
@@ -1714,12 +1742,13 @@ match_clause
 ;

 nonblock_match_clause
-: maybe_outer_attrs pats_or maybe_guard FAT_ARROW nonblock_expr { $$ = mk_node("Arm", 4, $1, $2, $3, $5); }
+: maybe_outer_attrs pats_or maybe_guard FAT_ARROW nonblock_expr { $$ = mk_node("ArmNonblock", 4, $1, $2, $3, $5); }
-| maybe_outer_attrs pats_or maybe_guard FAT_ARROW full_block_expr { $$ = mk_node("Arm", 4, $1, $2, $3, $5); }
+| maybe_outer_attrs pats_or maybe_guard FAT_ARROW block_expr_dot { $$ = mk_node("ArmNonblock", 4, $1, $2, $3, $5); }
 ;

 block_match_clause
-: maybe_outer_attrs pats_or maybe_guard FAT_ARROW block { $$ = mk_node("Arm", 4, $1, $2, $3, $5); }
+: maybe_outer_attrs pats_or maybe_guard FAT_ARROW block { $$ = mk_node("ArmBlock", 4, $1, $2, $3, $5); }
+| maybe_outer_attrs pats_or maybe_guard FAT_ARROW block_expr { $$ = mk_node("ArmBlock", 4, $1, $2, $3, $5); }
 ;

 maybe_guard
@@ -1796,6 +1825,10 @@ maybe_ident

 ident
 : IDENT { $$ = mk_node("ident", 1, mk_atom(yytext)); }
+// Weak keywords that can be used as identifiers
+| CATCH { $$ = mk_node("ident", 1, mk_atom(yytext)); }
+| DEFAULT { $$ = mk_node("ident", 1, mk_atom(yytext)); }
+| UNION { $$ = mk_node("ident", 1, mk_atom(yytext)); }
 ;

 unpaired_token
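`catch`, `default`, and `union` are only contextual ("weak") keywords, so the `ident` rule accepts them and they remain usable as ordinary names:

```rust
fn main() {
    // Contextual keywords are still valid identifiers.
    let union = 1;
    let default = 2;
    let catch = 3;
    assert_eq!(union + default + catch, 6);
}
```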
@@ -1836,13 +1869,20 @@ unpaired_token
 | LIFETIME { $$ = mk_atom(yytext); }
 | SELF { $$ = mk_atom(yytext); }
 | STATIC { $$ = mk_atom(yytext); }
+| ABSTRACT { $$ = mk_atom(yytext); }
+| ALIGNOF { $$ = mk_atom(yytext); }
 | AS { $$ = mk_atom(yytext); }
+| BECOME { $$ = mk_atom(yytext); }
 | BREAK { $$ = mk_atom(yytext); }
+| CATCH { $$ = mk_atom(yytext); }
 | CRATE { $$ = mk_atom(yytext); }
+| DEFAULT { $$ = mk_atom(yytext); }
+| DO { $$ = mk_atom(yytext); }
 | ELSE { $$ = mk_atom(yytext); }
 | ENUM { $$ = mk_atom(yytext); }
 | EXTERN { $$ = mk_atom(yytext); }
 | FALSE { $$ = mk_atom(yytext); }
+| FINAL { $$ = mk_atom(yytext); }
 | FN { $$ = mk_atom(yytext); }
 | FOR { $$ = mk_atom(yytext); }
 | IF { $$ = mk_atom(yytext); }
@@ -1850,21 +1890,31 @@ unpaired_token
 | IN { $$ = mk_atom(yytext); }
 | LET { $$ = mk_atom(yytext); }
 | LOOP { $$ = mk_atom(yytext); }
+| MACRO { $$ = mk_atom(yytext); }
 | MATCH { $$ = mk_atom(yytext); }
 | MOD { $$ = mk_atom(yytext); }
 | MOVE { $$ = mk_atom(yytext); }
 | MUT { $$ = mk_atom(yytext); }
+| OFFSETOF { $$ = mk_atom(yytext); }
+| OVERRIDE { $$ = mk_atom(yytext); }
 | PRIV { $$ = mk_atom(yytext); }
 | PUB { $$ = mk_atom(yytext); }
+| PURE { $$ = mk_atom(yytext); }
 | REF { $$ = mk_atom(yytext); }
 | RETURN { $$ = mk_atom(yytext); }
 | STRUCT { $$ = mk_atom(yytext); }
+| SIZEOF { $$ = mk_atom(yytext); }
+| SUPER { $$ = mk_atom(yytext); }
 | TRUE { $$ = mk_atom(yytext); }
 | TRAIT { $$ = mk_atom(yytext); }
 | TYPE { $$ = mk_atom(yytext); }
+| UNION { $$ = mk_atom(yytext); }
 | UNSAFE { $$ = mk_atom(yytext); }
+| UNSIZED { $$ = mk_atom(yytext); }
 | USE { $$ = mk_atom(yytext); }
+| VIRTUAL { $$ = mk_atom(yytext); }
 | WHILE { $$ = mk_atom(yytext); }
+| YIELD { $$ = mk_atom(yytext); }
 | CONTINUE { $$ = mk_atom(yytext); }
 | PROC { $$ = mk_atom(yytext); }
 | BOX { $$ = mk_atom(yytext); }
@@ -30,6 +30,7 @@ enum Token {
   DOTDOT,
   DOTDOTDOT,
   MOD_SEP,
+  LARROW,
   RARROW,
   FAT_ARROW,
   LIT_BYTE,
@@ -47,13 +48,20 @@ enum Token {
   // keywords
   SELF,
   STATIC,
+  ABSTRACT,
+  ALIGNOF,
   AS,
+  BECOME,
   BREAK,
+  CATCH,
   CRATE,
+  DEFAULT,
+  DO,
   ELSE,
   ENUM,
   EXTERN,
   FALSE,
+  FINAL,
   FN,
   FOR,
   IF,
@@ -61,21 +69,31 @@ enum Token {
   IN,
   LET,
   LOOP,
+  MACRO,
   MATCH,
   MOD,
   MOVE,
   MUT,
+  OFFSETOF,
+  OVERRIDE,
   PRIV,
   PUB,
+  PURE,
   REF,
   RETURN,
+  SIZEOF,
   STRUCT,
+  SUPER,
+  UNION,
   TRUE,
   TRAIT,
   TYPE,
   UNSAFE,
+  UNSIZED,
   USE,
+  VIRTUAL,
   WHILE,
+  YIELD,
   CONTINUE,
   PROC,
   BOX,
@@ -269,7 +269,38 @@ impl<T: ?Sized> Box<T> {
     #[stable(feature = "box_raw", since = "1.4.0")]
     #[inline]
     pub unsafe fn from_raw(raw: *mut T) -> Self {
-        mem::transmute(raw)
+        Box::from_unique(Unique::new_unchecked(raw))
+    }
+
+    /// Constructs a `Box` from a `Unique<T>` pointer.
+    ///
+    /// After calling this function, the memory is owned by a `Box` and `T` can
+    /// then be destroyed and released upon drop.
+    ///
+    /// # Safety
+    ///
+    /// A `Unique<T>` can be safely created via [`Unique::new`] and thus doesn't
+    /// necessarily own the data pointed to nor is the data guaranteed to live
+    /// as long as the pointer.
+    ///
+    /// [`Unique::new`]: ../../core/ptr/struct.Unique.html#method.new
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(unique)]
+    ///
+    /// fn main() {
+    ///     let x = Box::new(5);
+    ///     let ptr = Box::into_unique(x);
+    ///     let x = unsafe { Box::from_unique(ptr) };
+    /// }
+    /// ```
+    #[unstable(feature = "unique", reason = "needs an RFC to flesh out design",
+               issue = "27730")]
+    #[inline]
+    pub unsafe fn from_unique(u: Unique<T>) -> Self {
+        mem::transmute(u)
     }

     /// Consumes the `Box`, returning the wrapped raw pointer.
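`Box::from_raw` now round-trips through `Unique` internally, but the public contract is unchanged: `into_raw` releases the allocation as a raw pointer and `from_raw` reclaims ownership of it. For example:

```rust
fn main() {
    let boxed = Box::new(String::from("hello"));

    // `Box::into_raw` gives up ownership of the allocation as a raw pointer;
    // `Box::from_raw` takes it back, after which dropping the Box frees it.
    let raw: *mut String = Box::into_raw(boxed);
    let boxed_again = unsafe { Box::from_raw(raw) };

    assert_eq!(*boxed_again, "hello");
}
```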
@@ -295,7 +326,7 @@ impl<T: ?Sized> Box<T> {
     #[stable(feature = "box_raw", since = "1.4.0")]
     #[inline]
     pub fn into_raw(b: Box<T>) -> *mut T {
-        unsafe { mem::transmute(b) }
+        Box::into_unique(b).as_ptr()
     }

     /// Consumes the `Box`, returning the wrapped pointer as `Unique<T>`.
|
||||||
/// After calling this function, the caller is responsible for the
|
/// After calling this function, the caller is responsible for the
|
||||||
/// memory previously managed by the `Box`. In particular, the
|
/// memory previously managed by the `Box`. In particular, the
|
||||||
/// caller should properly destroy `T` and release the memory. The
|
/// caller should properly destroy `T` and release the memory. The
|
||||||
/// proper way to do so is to convert the raw pointer back into a
|
/// proper way to do so is to either convert the `Unique<T>` pointer:
|
||||||
/// `Box` with the [`Box::from_raw`] function.
|
///
|
||||||
|
/// - Into a `Box` with the [`Box::from_unique`] function.
|
||||||
|
///
|
||||||
|
/// - Into a raw pointer and back into a `Box` with the [`Box::from_raw`]
|
||||||
|
/// function.
|
||||||
///
|
///
|
||||||
/// Note: this is an associated function, which means that you have
|
/// Note: this is an associated function, which means that you have
|
||||||
/// to call it as `Box::into_unique(b)` instead of `b.into_unique()`. This
|
/// to call it as `Box::into_unique(b)` instead of `b.into_unique()`. This
|
||||||
/// is so that there is no conflict with a method on the inner type.
|
/// is so that there is no conflict with a method on the inner type.
|
||||||
///
|
///
|
||||||
|
/// [`Box::from_unique`]: struct.Box.html#method.from_unique
|
||||||
/// [`Box::from_raw`]: struct.Box.html#method.from_raw
|
/// [`Box::from_raw`]: struct.Box.html#method.from_raw
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
|
|
|
@@ -121,6 +121,7 @@
 #![feature(unique)]
 #![feature(unsize)]
 #![feature(allocator_internals)]
+#![feature(on_unimplemented)]

 #![cfg_attr(not(test), feature(fused, fn_traits, placement_new_protocol, swap_with_slice, i128))]
 #![cfg_attr(test, feature(test, box_heap))]
@@ -1543,6 +1543,7 @@ impl<T: Hash> Hash for Vec<T> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> Index<usize> for Vec<T> {
     type Output = T;

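`#[rustc_on_unimplemented]` only changes the diagnostic rustc prints when the `Index` bound isn't satisfied; indexing a `Vec` still requires `usize` (or a `usize` range). Roughly:

```rust
fn main() {
    let v = vec![10, 20, 30];

    // Vec indexing is defined for `usize` (and usize ranges), so this is fine:
    let second = v[1usize];
    assert_eq!(second, 20);

    // Indexing with another integer type fails to compile; with the attribute
    // above, the error carries a note along the lines of:
    //   note: vector indices are of type `usize` or ranges of `usize`
    // let bad = v[1i32];
}
```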
@@ -1554,6 +1555,7 @@ impl<T> Index<usize> for Vec<T> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> IndexMut<usize> for Vec<T> {
     #[inline]
     fn index_mut(&mut self, index: usize) -> &mut T {
@@ -1562,8 +1564,8 @@ impl<T> IndexMut<usize> for Vec<T> {
     }
 }

-
 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
     type Output = [T];

@@ -1572,7 +1574,9 @@ impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
         Index::index(&**self, index)
     }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
     type Output = [T];

@@ -1581,7 +1585,9 @@ impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
         Index::index(&**self, index)
     }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
     type Output = [T];

@@ -1590,7 +1596,9 @@ impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
         Index::index(&**self, index)
     }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::Index<ops::RangeFull> for Vec<T> {
     type Output = [T];

@@ -1599,7 +1607,9 @@ impl<T> ops::Index<ops::RangeFull> for Vec<T> {
         self
     }
 }

 #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::Index<ops::RangeInclusive<usize>> for Vec<T> {
     type Output = [T];

@@ -1608,7 +1618,9 @@ impl<T> ops::Index<ops::RangeInclusive<usize>> for Vec<T> {
         Index::index(&**self, index)
     }
 }

 #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::Index<ops::RangeToInclusive<usize>> for Vec<T> {
     type Output = [T];

@@ -1619,41 +1631,52 @@ impl<T> ops::Index<ops::RangeToInclusive<usize>> for Vec<T> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
 impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
     #[inline]
     fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
         IndexMut::index_mut(&mut **self, index)
|
IndexMut::index_mut(&mut **self, index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
|
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
|
impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
|
fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
|
||||||
IndexMut::index_mut(&mut **self, index)
|
IndexMut::index_mut(&mut **self, index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
|
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
|
impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
|
fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
|
||||||
IndexMut::index_mut(&mut **self, index)
|
IndexMut::index_mut(&mut **self, index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
|
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
|
impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [T] {
|
fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [T] {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
||||||
|
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::IndexMut<ops::RangeInclusive<usize>> for Vec<T> {
|
impl<T> ops::IndexMut<ops::RangeInclusive<usize>> for Vec<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut [T] {
|
fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut [T] {
|
||||||
IndexMut::index_mut(&mut **self, index)
|
IndexMut::index_mut(&mut **self, index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
||||||
|
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::IndexMut<ops::RangeToInclusive<usize>> for Vec<T> {
|
impl<T> ops::IndexMut<ops::RangeToInclusive<usize>> for Vec<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut [T] {
|
fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut [T] {
|
||||||
|
|
|
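The hunks above attach a `#[rustc_on_unimplemented]` note to every `Index`/`IndexMut` impl of `Vec<T>`. A minimal sketch of the user-facing side, showing the indexing forms these impls cover and the situation the new note is aimed at:

```rust
fn main() {
    let v = vec![10, 20, 30, 40];
    // All of these go through the `Index` impls annotated above:
    let first: i32 = v[0];          // Index<usize>
    let middle: &[i32] = &v[1..3];  // Index<Range<usize>>
    let tail: &[i32] = &v[2..];     // Index<RangeFrom<usize>>
    assert_eq!((first, middle, tail), (10, &[20, 30][..], &[30, 40][..]));

    // Indexing with a non-`usize` type, e.g. `v[0u8]`, does not compile; the
    // `#[rustc_on_unimplemented]` text is what tells the user that vector
    // indices are of type `usize` or ranges of `usize`.
}
```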
@@ -19,7 +19,7 @@ libc = { path = "../rustc/libc_shim" }
 
 [build-dependencies]
 build_helper = { path = "../build_helper" }
-cc = "1.0"
+cc = "1.0.1"
 
 [features]
 debug = []
@@ -63,15 +63,6 @@ fn main() {
         _ => return,
     };
 
-    let compiler = cc::Build::new().get_compiler();
-    // only msvc returns None for ar so unwrap is okay
-    let ar = build_helper::cc2ar(compiler.path(), &target).unwrap();
-    let cflags = compiler.args()
-        .iter()
-        .map(|s| s.to_str().unwrap())
-        .collect::<Vec<_>>()
-        .join(" ");
-
     let mut cmd = Command::new("sh");
     cmd.arg(native.src_dir.join("configure")
                 .to_str()
@@ -79,8 +70,6 @@ fn main() {
                 .replace("C:\\", "/c/")
                 .replace("\\", "/"))
        .current_dir(&native.out_dir)
-       .env("CC", compiler.path())
-       .env("EXTRA_CFLAGS", cflags.clone())
        // jemalloc generates Makefile deps using GCC's "-MM" flag. This means
        // that GCC will run the preprocessor, and only the preprocessor, over
        // jemalloc's source files. If we don't specify CPPFLAGS, then at least
@@ -89,9 +78,7 @@ fn main() {
        // passed to GCC, and then GCC won't define the
       // "__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4" macro that jemalloc needs to
        // select an atomic operation implementation.
-       .env("CPPFLAGS", cflags.clone())
-       .env("AR", &ar)
-       .env("RANLIB", format!("{} s", ar.display()));
+       .env("CPPFLAGS", env::var_os("CFLAGS").unwrap_or_default());
 
     if target.contains("ios") {
         cmd.arg("--disable-tls");
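The build script now forwards the caller's `CFLAGS` instead of deriving flags from a probed C compiler. A hedged, self-contained sketch of that pattern; the script name and arguments are illustrative only, not the real jemalloc configure invocation:

```rust
// build.rs (sketch)
use std::env;
use std::process::Command;

fn main() {
    // Forward the caller's CFLAGS (if any) to a configure-style build,
    // falling back to an empty value when the variable is unset.
    let cflags = env::var_os("CFLAGS").unwrap_or_default();

    let mut cmd = Command::new("sh");
    cmd.arg("configure").env("CPPFLAGS", cflags);
    // `cmd.status()` would actually run the script; omitted in this sketch.
}
```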
@@ -2059,14 +2059,23 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return Ordering::Equal,
-                (None, _ ) => return Ordering::Less,
-                (_ , None) => return Ordering::Greater,
-                (Some(x), Some(y)) => match x.cmp(&y) {
-                    Ordering::Equal => (),
-                    non_eq => return non_eq,
-                },
-            }
+            let x = match self.next() {
+                None => if other.next().is_none() {
+                    return Ordering::Equal
+                } else {
+                    return Ordering::Less
+                },
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return Ordering::Greater,
+                Some(val) => val,
+            };
+
+            match x.cmp(&y) {
+                Ordering::Equal => (),
+                non_eq => return non_eq,
+            }
         }
     }
@@ -2082,14 +2091,23 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return Some(Ordering::Equal),
-                (None, _ ) => return Some(Ordering::Less),
-                (_ , None) => return Some(Ordering::Greater),
-                (Some(x), Some(y)) => match x.partial_cmp(&y) {
-                    Some(Ordering::Equal) => (),
-                    non_eq => return non_eq,
-                },
-            }
+            let x = match self.next() {
+                None => if other.next().is_none() {
+                    return Some(Ordering::Equal)
+                } else {
+                    return Some(Ordering::Less)
+                },
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return Some(Ordering::Greater),
+                Some(val) => val,
+            };
+
+            match x.partial_cmp(&y) {
+                Some(Ordering::Equal) => (),
+                non_eq => return non_eq,
+            }
         }
     }
@@ -2105,11 +2123,17 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return true,
-                (None, _) | (_, None) => return false,
-                (Some(x), Some(y)) => if x != y { return false },
-            }
+            let x = match self.next() {
+                None => return other.next().is_none(),
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return false,
+                Some(val) => val,
+            };
+
+            if x != y { return false }
         }
     }
@@ -2124,11 +2148,17 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return false,
-                (None, _) | (_, None) => return true,
-                (Some(x), Some(y)) => if x.ne(&y) { return true },
-            }
+            let x = match self.next() {
+                None => return other.next().is_some(),
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return true,
+                Some(val) => val,
+            };
+
+            if x != y { return true }
         }
     }
@@ -2143,19 +2173,22 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return false,
-                (None, _ ) => return true,
-                (_ , None) => return false,
-                (Some(x), Some(y)) => {
-                    match x.partial_cmp(&y) {
-                        Some(Ordering::Less) => return true,
-                        Some(Ordering::Equal) => {}
-                        Some(Ordering::Greater) => return false,
-                        None => return false,
-                    }
-                },
-            }
+            let x = match self.next() {
+                None => return other.next().is_some(),
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return false,
+                Some(val) => val,
+            };
+
+            match x.partial_cmp(&y) {
+                Some(Ordering::Less) => return true,
+                Some(Ordering::Equal) => (),
+                Some(Ordering::Greater) => return false,
+                None => return false,
+            }
         }
     }
@@ -2170,19 +2203,22 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return true,
-                (None, _ ) => return true,
-                (_ , None) => return false,
-                (Some(x), Some(y)) => {
-                    match x.partial_cmp(&y) {
-                        Some(Ordering::Less) => return true,
-                        Some(Ordering::Equal) => {}
-                        Some(Ordering::Greater) => return false,
-                        None => return false,
-                    }
-                },
-            }
+            let x = match self.next() {
+                None => { other.next(); return true; },
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return false,
+                Some(val) => val,
+            };
+
+            match x.partial_cmp(&y) {
+                Some(Ordering::Less) => return true,
+                Some(Ordering::Equal) => (),
+                Some(Ordering::Greater) => return false,
+                None => return false,
+            }
         }
     }
@@ -2197,21 +2233,24 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return false,
-                (None, _ ) => return false,
-                (_ , None) => return true,
-                (Some(x), Some(y)) => {
-                    match x.partial_cmp(&y) {
-                        Some(Ordering::Less) => return false,
-                        Some(Ordering::Equal) => {}
-                        Some(Ordering::Greater) => return true,
-                        None => return false,
-                    }
-                }
-            }
+            let x = match self.next() {
+                None => { other.next(); return false; },
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return true,
+                Some(val) => val,
+            };
+
+            match x.partial_cmp(&y) {
+                Some(Ordering::Less) => return false,
+                Some(Ordering::Equal) => (),
+                Some(Ordering::Greater) => return true,
+                None => return false,
+            }
         }
     }
 
     /// Determines if the elements of this `Iterator` are lexicographically
     /// greater than or equal to those of another.
@@ -2224,19 +2263,22 @@ pub trait Iterator {
         let mut other = other.into_iter();
 
         loop {
-            match (self.next(), other.next()) {
-                (None, None) => return true,
-                (None, _ ) => return false,
-                (_ , None) => return true,
-                (Some(x), Some(y)) => {
-                    match x.partial_cmp(&y) {
-                        Some(Ordering::Less) => return false,
-                        Some(Ordering::Equal) => {}
-                        Some(Ordering::Greater) => return true,
-                        None => return false,
-                    }
-                },
-            }
+            let x = match self.next() {
+                None => return other.next().is_none(),
+                Some(val) => val,
+            };
+
+            let y = match other.next() {
+                None => return true,
+                Some(val) => val,
+            };
+
+            match x.partial_cmp(&y) {
+                Some(Ordering::Less) => return false,
+                Some(Ordering::Equal) => (),
+                Some(Ordering::Greater) => return true,
+                None => return false,
+            }
         }
     }
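The rewritten loops above preserve the lexicographic semantics of `cmp`, `partial_cmp`, `eq`, `ne`, `lt`, `le`, `gt` and `ge`. A small standalone sketch of the observable behaviour (not taken from the diff itself):

```rust
use std::cmp::Ordering;

fn main() {
    // Lexicographic comparison consumes both iterators element by element,
    // exactly as the loops above do.
    assert_eq!([1, 2, 3].iter().cmp([1, 2, 4].iter()), Ordering::Less);
    assert_eq!([1, 2].iter().cmp([1, 2].iter()), Ordering::Equal);
    // A strict prefix compares as less.
    assert!([1, 2].iter().lt([1, 2, 3].iter()));
    // `partial_cmp` goes through PartialOrd, so a NaN makes it return None.
    assert_eq!(
        [1.0, f64::NAN].iter().partial_cmp([1.0, 2.0].iter()),
        None
    );
}
```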
@@ -429,9 +429,11 @@ pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
 
 /// Returns whether dropping values of type `T` matters.
 ///
-/// This is purely an optimization hint, and may be implemented conservatively.
-/// For instance, always returning `true` would be a valid implementation of
-/// this function.
+/// This is purely an optimization hint, and may be implemented conservatively:
+/// it may return `true` for types that don't actually need to be dropped.
+/// As such always returning `true` would be a valid implementation of
+/// this function. However if this function actually returns `false`, then you
+/// can be certain dropping `T` has no side effect.
 ///
 /// Low level implementations of things like collections, which need to manually
 /// drop their data, should use this function to avoid unnecessarily
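A standalone sketch of the guarantee the reworded docs describe: `false` is a promise that dropping has no side effect, while `true` is merely a conservative answer.

```rust
#![allow(dead_code)]

use std::mem;

struct PlainData(u32);      // no Drop impl, no owned resources
struct OwnsHeap(Vec<u8>);   // dropping this matters: the Vec frees memory

fn main() {
    assert!(!mem::needs_drop::<PlainData>());
    assert!(mem::needs_drop::<OwnsHeap>());
}
```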
@@ -836,7 +838,7 @@ pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
 ///
 /// See the `discriminant` function in this module for more information.
 #[stable(feature = "discriminant_value", since = "1.21.0")]
-pub struct Discriminant<T>(u64, PhantomData<*const T>);
+pub struct Discriminant<T>(u64, PhantomData<fn() -> T>);
 
 // N.B. These trait implementations cannot be derived because we don't want any bounds on T.
@@ -1405,16 +1405,6 @@ impl<'a> DoubleEndedIterator for LinesAny<'a> {
 #[allow(deprecated)]
 impl<'a> FusedIterator for LinesAny<'a> {}
 
-/*
-Section: Comparing strings
-*/
-
-/// Bytewise slice equality
-#[inline]
-fn eq_slice(a: &str, b: &str) -> bool {
-    a.as_bytes() == b.as_bytes()
-}
-
 /*
 Section: UTF-8 validation
 */
@@ -1590,7 +1580,6 @@ mod traits {
     use cmp::Ordering;
     use ops;
     use slice::{self, SliceIndex};
-    use str::eq_slice;
 
     /// Implements ordering of strings.
     ///
@@ -1611,7 +1600,7 @@ mod traits {
     impl PartialEq for str {
         #[inline]
         fn eq(&self, other: &str) -> bool {
-            eq_slice(self, other)
+            self.as_bytes() == other.as_bytes()
         }
         #[inline]
         fn ne(&self, other: &str) -> bool { !(*self).eq(other) }
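With the private `eq_slice` helper gone, `str` equality is written directly as a byte comparison of the UTF-8 encodings. A small example of what that means in practice:

```rust
fn main() {
    let a = "héllo";
    let b = String::from("héllo");
    // `str` equality is bytewise equality of the UTF-8 encoding, which is
    // exactly what the inlined `as_bytes()` comparison above expresses.
    assert_eq!(a, b.as_str());
    assert_eq!(a.as_bytes(), b.as_bytes());
}
```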
@@ -121,3 +121,19 @@ fn test_transmute() {
     }
 }
 
+#[test]
+#[allow(dead_code)]
+fn test_discriminant_send_sync() {
+    enum Regular {
+        A,
+        B(i32)
+    }
+    enum NotSendSync {
+        A(*const i32)
+    }
+
+    fn is_send_sync<T: Send + Sync>() { }
+
+    is_send_sync::<Discriminant<Regular>>();
+    is_send_sync::<Discriminant<NotSendSync>>();
+}
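`Discriminant<T>` now carries `PhantomData<fn() -> T>`, which is what lets the new test assert `Send + Sync` regardless of `T`. A standalone sketch of what `mem::discriminant` is used for:

```rust
use std::mem;

#[allow(dead_code)]
enum Message {
    Quit,
    Move { x: i32, y: i32 },
    Write(String),
}

fn same_variant(a: &Message, b: &Message) -> bool {
    // Compares only which variant each value is, ignoring the payload.
    mem::discriminant(a) == mem::discriminant(b)
}

fn main() {
    let a = Message::Move { x: 1, y: 2 };
    let b = Message::Move { x: 9, y: 9 };
    assert!(same_variant(&a, &b));
    assert!(!same_variant(&a, &Message::Quit));
}
```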
@@ -488,7 +488,7 @@ impl Literal {
     pub fn string(string: &str) -> Literal {
         let mut escaped = String::new();
         for ch in string.chars() {
-            escaped.extend(ch.escape_unicode());
+            escaped.extend(ch.escape_debug());
        }
         Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
     }
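The switch from `escape_unicode` to `escape_debug` changes how `Literal::string` renders string contents. The difference is easiest to see on `char` directly (standalone example):

```rust
fn main() {
    // `escape_unicode` escapes every character, even plain ASCII:
    assert_eq!('a'.escape_unicode().to_string(), "\\u{61}");
    // `escape_debug` keeps printable characters readable and only escapes
    // what a debug-style literal actually needs:
    assert_eq!('a'.escape_debug().to_string(), "a");
    assert_eq!('\n'.escape_debug().to_string(), "\\n");
    assert_eq!('♥'.escape_debug().to_string(), "♥");
}
```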
@@ -15,4 +15,4 @@ doc = false
 core = { path = "../libcore" }
 
 [build-dependencies]
-cc = "1.0"
+cc = "1.0.1"
@@ -65,7 +65,7 @@ use hir::map::DefPathHash;
 use hir::{HirId, ItemLocalId};
 
 use ich::Fingerprint;
-use ty::{TyCtxt, Instance, InstanceDef, ParamEnvAnd, Ty};
+use ty::{TyCtxt, Instance, InstanceDef, ParamEnv, ParamEnvAnd, PolyTraitRef, Ty};
 use ty::subst::Substs;
 use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
 use ich::StableHashingContext;
@@ -505,6 +505,8 @@ define_dep_nodes!( <'tcx>
     [] InstanceSymbolName { instance: Instance<'tcx> },
     [] SpecializationGraph(DefId),
     [] ObjectSafety(DefId),
+    [] FulfillObligation { param_env: ParamEnv<'tcx>, trait_ref: PolyTraitRef<'tcx> },
+    [] VtableMethods { trait_ref: PolyTraitRef<'tcx> },
 
     [] IsCopy { param_env: ParamEnvAnd<'tcx, Ty<'tcx>> },
     [] IsSized { param_env: ParamEnvAnd<'tcx, Ty<'tcx>> },
@@ -19,7 +19,7 @@ mod safe;
 mod serialized;
 
 pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
-pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId};
+pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, label_strs};
 pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor};
 pub use self::prev::PreviousDepGraph;
 pub use self::query::DepGraphQuery;
@@ -41,7 +41,7 @@ use super::intravisit::Visitor;
 ///   - Example: Lifetime resolution, which wants to bring lifetimes declared on the
 ///     impl into scope while visiting the impl-items, and then back out again.
 ///   - How: Implement `intravisit::Visitor` and override the
-///     `visit_nested_map()` methods to return
+///     `nested_visit_map()` methods to return
 ///     `NestedVisitorMap::All`. Walk your crate with
 ///     `intravisit::walk_crate()` invoked on `tcx.hir.krate()`.
 ///   - Pro: Visitor methods for any kind of HIR node, not just item-like things.
@@ -705,7 +705,7 @@ impl<'a> LoweringContext<'a> {
                 let expr = self.lower_body(None, |this| this.lower_expr(expr));
                 hir::TyTypeof(expr)
             }
-            TyKind::TraitObject(ref bounds) => {
+            TyKind::TraitObject(ref bounds, ..) => {
                 let mut lifetime_bound = None;
                 let bounds = bounds.iter().filter_map(|bound| {
                     match *bound {
@@ -61,6 +61,9 @@ for ty::RegionKind {
             def_id.hash_stable(hcx, hasher);
             name.hash_stable(hcx, hasher);
         }
+        ty::ReLateBound(db, ty::BrEnv) => {
+            db.depth.hash_stable(hcx, hasher);
+        }
         ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, index, name }) => {
             def_id.hash_stable(hcx, hasher);
             index.hash_stable(hcx, hasher);
@@ -841,3 +844,129 @@ impl_stable_hash_for!(struct ::util::common::ErrorReported {});
 impl_stable_hash_for!(tuple_struct ::middle::reachable::ReachableSet {
     reachable_set
 });
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::Vtable<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        use traits::Vtable::*;
+
+        mem::discriminant(self).hash_stable(hcx, hasher);
+
+        match self {
+            &VtableImpl(ref table_impl) => table_impl.hash_stable(hcx, hasher),
+            &VtableDefaultImpl(ref table_def_impl) => table_def_impl.hash_stable(hcx, hasher),
+            &VtableParam(ref table_param) => table_param.hash_stable(hcx, hasher),
+            &VtableObject(ref table_obj) => table_obj.hash_stable(hcx, hasher),
+            &VtableBuiltin(ref table_builtin) => table_builtin.hash_stable(hcx, hasher),
+            &VtableClosure(ref table_closure) => table_closure.hash_stable(hcx, hasher),
+            &VtableFnPointer(ref table_fn_pointer) => table_fn_pointer.hash_stable(hcx, hasher),
+            &VtableGenerator(ref table_generator) => table_generator.hash_stable(hcx, hasher),
+        }
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableImplData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableImplData {
+            impl_def_id,
+            substs,
+            ref nested,
+        } = *self;
+        impl_def_id.hash_stable(hcx, hasher);
+        substs.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableDefaultImplData<N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableDefaultImplData {
+            trait_def_id,
+            ref nested,
+        } = *self;
+        trait_def_id.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableObjectData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableObjectData {
+            upcast_trait_ref,
+            vtable_base,
+            ref nested,
+        } = *self;
+        upcast_trait_ref.hash_stable(hcx, hasher);
+        vtable_base.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableBuiltinData<N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableBuiltinData {
+            ref nested,
+        } = *self;
+        nested.hash_stable(hcx, hasher);
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableClosureData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableClosureData {
+            closure_def_id,
+            substs,
+            ref nested,
+        } = *self;
+        closure_def_id.hash_stable(hcx, hasher);
+        substs.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableFnPointerData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableFnPointerData {
+            fn_ty,
+            ref nested,
+        } = *self;
+        fn_ty.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
+
+impl<'gcx, N> HashStable<StableHashingContext<'gcx>>
+for traits::VtableGeneratorData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableGeneratorData {
+            closure_def_id,
+            substs,
+            ref nested,
+        } = *self;
+        closure_def_id.hash_stable(hcx, hasher);
+        substs.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
@@ -106,6 +106,7 @@ pub mod lint;
 
 pub mod middle {
     pub mod allocator;
+    pub mod borrowck;
     pub mod expr_use_visitor;
    pub mod const_val;
     pub mod cstore;
@@ -222,6 +222,12 @@ declare_lint! {
     "unnecessary use of an `unsafe` block"
 }
 
+declare_lint! {
+    pub UNUSED_MUT,
+    Warn,
+    "detect mut variables which don't need to be mutable"
+}
+
 /// Does nothing as a lint pass, but registers some `Lint`s
 /// which are used by other parts of the compiler.
 #[derive(Copy, Clone)]
@@ -263,7 +269,8 @@ impl LintPass for HardwiredLints {
             PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
             LATE_BOUND_LIFETIME_ARGUMENTS,
             DEPRECATED,
-            UNUSED_UNSAFE
+            UNUSED_UNSAFE,
+            UNUSED_MUT
         )
     }
 }
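The new `UNUSED_MUT` hardwired lint corresponds to the familiar warning below. A minimal example of code that trips it, and code that does not:

```rust
fn main() {
    // With the `unused_mut` lint (warn by default), the compiler points out
    // that this binding never needed `mut`:
    let mut x = 5; // warning: variable does not need to be mutable
    println!("{}", x);

    // A binding that is actually mutated does not trigger the lint.
    let mut y = 0;
    y += 1;
    println!("{}", y);
}
```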
src/librustc/middle/borrowck.rs (new file, 31 additions)
@@ -0,0 +1,31 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use ich::StableHashingContext;
+use hir::HirId;
+use util::nodemap::FxHashSet;
+
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
+                                           StableHasherResult};
+
+pub struct BorrowCheckResult {
+    pub used_mut_nodes: FxHashSet<HirId>,
+}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for BorrowCheckResult {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'gcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let BorrowCheckResult {
+            ref used_mut_nodes,
+        } = *self;
+        used_mut_nodes.hash_stable(hcx, hasher);
+    }
+}
@@ -336,6 +336,12 @@ struct CollectPrivateImplItemsVisitor<'a, 'tcx: 'a> {
 
 impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx> {
     fn visit_item(&mut self, item: &hir::Item) {
+        // Anything which has custom linkage gets thrown on the worklist no
+        // matter where it is in the crate.
+        if attr::contains_name(&item.attrs, "linkage") {
+            self.worklist.push(item.id);
+        }
+
         // We need only trait impls here, not inherent impls, and only non-exported ones
         if let hir::ItemImpl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.node {
             if !self.access_levels.is_reachable(item.id) {
@@ -415,9 +415,11 @@ pub enum BorrowKind {
 ///////////////////////////////////////////////////////////////////////////
 // Variables and temps
 
-newtype_index!(Local, "_");
-pub const RETURN_POINTER: Local = Local(0);
+newtype_index!(Local
+    {
+        DEBUG_NAME = "_",
+        const RETURN_POINTER = 0,
+    });
 
 /// Classifies locals into categories. See `Mir::local_kind`.
 #[derive(PartialEq, Eq, Debug)]
@@ -551,7 +553,7 @@ pub struct UpvarDecl {
 ///////////////////////////////////////////////////////////////////////////
 // BasicBlock
 
-newtype_index!(BasicBlock, "bb");
+newtype_index!(BasicBlock { DEBUG_NAME = "bb" });
 
 ///////////////////////////////////////////////////////////////////////////
 // BasicBlockData and Terminator
@@ -1131,7 +1133,7 @@ pub type LvalueProjection<'tcx> = Projection<'tcx, Lvalue<'tcx>, Local, Ty<'tcx>
 /// and the index is a local.
 pub type LvalueElem<'tcx> = ProjectionElem<'tcx, Local, Ty<'tcx>>;
 
-newtype_index!(Field, "field");
+newtype_index!(Field { DEBUG_NAME = "field" });
 
 impl<'tcx> Lvalue<'tcx> {
     pub fn field(self, f: Field, ty: Ty<'tcx>) -> Lvalue<'tcx> {
@@ -1196,8 +1198,11 @@ impl<'tcx> Debug for Lvalue<'tcx> {
 ///////////////////////////////////////////////////////////////////////////
 // Scopes
 
-newtype_index!(VisibilityScope, "scope");
-pub const ARGUMENT_VISIBILITY_SCOPE : VisibilityScope = VisibilityScope(0);
+newtype_index!(VisibilityScope
+    {
+        DEBUG_NAME = "scope",
+        const ARGUMENT_VISIBILITY_SCOPE = 0,
+    });
 
 #[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
 pub struct VisibilityScopeData {
@@ -1522,7 +1527,7 @@ pub struct Constant<'tcx> {
     pub literal: Literal<'tcx>,
 }
 
-newtype_index!(Promoted, "promoted");
+newtype_index!(Promoted { DEBUG_NAME = "promoted" });
 
 #[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
 pub enum Literal<'tcx> {
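The `newtype_index!` changes above move the macro to a braced body with a `DEBUG_NAME` and optional named constants. As a hand-rolled illustration of the index-newtype pattern these invocations rely on (the names below are illustrative, not the macro's real expansion):

```rust
// A u32 wrapper used as a typed index, with named constants for
// well-known indices such as a start block.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct BasicBlockIdx(u32);

impl BasicBlockIdx {
    pub const START: BasicBlockIdx = BasicBlockIdx(0);

    pub fn new(index: usize) -> Self {
        BasicBlockIdx(index as u32)
    }

    pub fn index(self) -> usize {
        self.0 as usize
    }
}

fn main() {
    let bb = BasicBlockIdx::new(3);
    assert_eq!(bb.index(), 3);
    assert!(BasicBlockIdx::START < bb);
}
```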
@@ -352,7 +352,7 @@ top_level_options!(
         actually_rustdoc: bool [TRACKED],
 
         // Number of object files/codegen units to produce on the backend
-        codegen_units: usize [UNTRACKED],
+        cli_forced_codegen_units: Option<usize> [UNTRACKED],
     }
 );
 
@@ -505,7 +505,7 @@ pub fn basic_options() -> Options {
         unstable_features: UnstableFeatures::Disallow,
         debug_assertions: true,
         actually_rustdoc: false,
-        codegen_units: 1,
+        cli_forced_codegen_units: None,
     }
 }
 
@@ -864,8 +864,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
          build_codegen_options, "C", "codegen",
          CG_OPTIONS, cg_type_desc, cgsetters,
     ar: Option<String> = (None, parse_opt_string, [UNTRACKED],
-        "tool to assemble archives with (has no effect currently, \
-         rustc doesn't use an external archiver)"),
+        "this option is deprecated and does nothing"),
     linker: Option<String> = (None, parse_opt_string, [UNTRACKED],
         "system linker to link outputs with"),
     link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
@@ -1100,6 +1099,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         "generate a graphical HTML report of time spent in trans and LLVM"),
     thinlto: bool = (false, parse_bool, [TRACKED],
         "enable ThinLTO when possible"),
+    inline_in_all_cgus: Option<bool> = (None, parse_opt_bool, [TRACKED],
+        "control whether #[inline] functions are in all cgus"),
 }
 
 pub fn default_lib_output() -> CrateType {
@@ -1709,48 +1710,6 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
 
     let incremental = debugging_opts.incremental.as_ref().map(|m| PathBuf::from(m));
 
-    let codegen_units = codegen_units.unwrap_or_else(|| {
-        match opt_level {
-            // If we're compiling at `-O0` then default to 16 codegen units.
-            // The number here shouldn't matter too too much as debug mode
-            // builds don't rely on performance at all, meaning that lost
-            // opportunities for inlining through multiple codegen units is
-            // a non-issue.
-            //
-            // Note that the high number here doesn't mean that we'll be
-            // spawning a large number of threads in parallel. The backend
-            // of rustc contains global rate limiting through the
-            // `jobserver` crate so we'll never overload the system with too
-            // much work, but rather we'll only be optimizing when we're
-            // otherwise cooperating with other instances of rustc.
-            //
-            // Rather the high number here means that we should be able to
-            // keep a lot of idle cpus busy. By ensuring that no codegen
-            // unit takes *too* long to build we'll be guaranteed that all
-            // cpus will finish pretty closely to one another and we should
-            // make relatively optimal use of system resources
-            //
-            // Another note worth mentioning here, however, is that this number
-            // isn't *too* high. When codegen units are increased that means we
-            // currently have to codegen `#[inline]` functions into each codegen
-            // unit, which means the more codegen units we're using the more we
-            // may be generating. In other words, increasing codegen units may
-            // increase the overall work the compiler does. If we don't have
-            // enough cores to make up for this loss then increasing the number
-            // of codegen units could become an overall loss!
-            //
-            // As a result we choose a hopefully conservative value 16, which
-            // should be more than the number of cpus of most hardware compiling
-            // Rust but also not too much for 2-4 core machines to have too much
-            // loss of compile time.
-            OptLevel::No => 16,
-
-            // All other optimization levels default use one codegen unit,
-            // the historical default in Rust for a Long Time.
-            _ => 1,
-        }
-    });
-
     (Options {
         crate_types,
         optimize: opt_level,
@@ -1775,7 +1734,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
         unstable_features: UnstableFeatures::from_environment(),
         debug_assertions,
         actually_rustdoc: false,
-        codegen_units,
+        cli_forced_codegen_units: codegen_units,
     },
     cfg)
 }
@@ -54,24 +54,24 @@ pub mod config;
 pub mod filesearch;
 pub mod search_paths;
 
-// Represents the data associated with a compilation
-// session for a single crate.
+/// Represents the data associated with a compilation
+/// session for a single crate.
 pub struct Session {
     pub target: config::Config,
     pub host: Target,
     pub opts: config::Options,
     pub parse_sess: ParseSess,
-    // For a library crate, this is always none
+    /// For a library crate, this is always none
     pub entry_fn: RefCell<Option<(NodeId, Span)>>,
     pub entry_type: Cell<Option<config::EntryFnType>>,
     pub plugin_registrar_fn: Cell<Option<ast::NodeId>>,
     pub derive_registrar_fn: Cell<Option<ast::NodeId>>,
     pub default_sysroot: Option<PathBuf>,
-    // The name of the root source file of the crate, in the local file system.
-    // `None` means that there is no source file.
+    /// The name of the root source file of the crate, in the local file system.
+    /// `None` means that there is no source file.
     pub local_crate_source_file: Option<String>,
-    // The directory the compiler has been executed in plus a flag indicating
-    // if the value stored here has been affected by path remapping.
+    /// The directory the compiler has been executed in plus a flag indicating
+    /// if the value stored here has been affected by path remapping.
     pub working_dir: (String, bool),
     pub lint_store: RefCell<lint::LintStore>,
     pub buffered_lints: RefCell<Option<lint::LintBuffer>>,
@@ -83,11 +83,11 @@ pub struct Session {
     pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
     pub crate_types: RefCell<Vec<config::CrateType>>,
     pub dependency_formats: RefCell<dependency_format::Dependencies>,
-    // The crate_disambiguator is constructed out of all the `-C metadata`
-    // arguments passed to the compiler. Its value together with the crate-name
-    // forms a unique global identifier for the crate. It is used to allow
-    // multiple crates with the same name to coexist. See the
-    // trans::back::symbol_names module for more information.
+    /// The crate_disambiguator is constructed out of all the `-C metadata`
+    /// arguments passed to the compiler. Its value together with the crate-name
+    /// forms a unique global identifier for the crate. It is used to allow
+    /// multiple crates with the same name to coexist. See the
+    /// trans::back::symbol_names module for more information.
     pub crate_disambiguator: RefCell<Option<Symbol>>,
     pub features: RefCell<feature_gate::Features>,
 
@@ -143,17 +143,17 @@ pub struct Session {
 }
 
 pub struct PerfStats {
-    // The accumulated time needed for computing the SVH of the crate
+    /// The accumulated time needed for computing the SVH of the crate
     pub svh_time: Cell<Duration>,
-    // The accumulated time spent on computing incr. comp. hashes
+    /// The accumulated time spent on computing incr. comp. hashes
     pub incr_comp_hashes_time: Cell<Duration>,
-    // The number of incr. comp. hash computations performed
+    /// The number of incr. comp. hash computations performed
     pub incr_comp_hashes_count: Cell<u64>,
-    // The number of bytes hashed when computing ICH values
+    /// The number of bytes hashed when computing ICH values
     pub incr_comp_bytes_hashed: Cell<u64>,
-    // The accumulated time spent on computing symbol hashes
+    /// The accumulated time spent on computing symbol hashes
     pub symbol_hash_time: Cell<Duration>,
-    // The accumulated time spent decoding def path tables from metadata
+    /// The accumulated time spent decoding def path tables from metadata
     pub decode_def_path_tables_time: Cell<Duration>,
 }
 
@@ -636,6 +636,43 @@ impl Session {
         }
         ret
     }
+
+    /// Returns the number of codegen units that should be used for this
+    /// compilation
+    pub fn codegen_units(&self) -> usize {
+        if let Some(n) = self.opts.cli_forced_codegen_units {
+            return n
+        }
+        if let Some(n) = self.target.target.options.default_codegen_units {
+            return n as usize
+        }
+
+        match self.opts.optimize {
+            // If we're compiling at `-O0` then default to 16 codegen units.
+            // The number here shouldn't matter too too much as debug mode
+            // builds don't rely on performance at all, meaning that lost
+            // opportunities for inlining through multiple codegen units is
+            // a non-issue.
+            //
+            // Note that the high number here doesn't mean that we'll be
+            // spawning a large number of threads in parallel. The backend
+            // of rustc contains global rate limiting through the
+            // `jobserver` crate so we'll never overload the system with too
+            // much work, but rather we'll only be optimizing when we're
+            // otherwise cooperating with other instances of rustc.
+            //
+            // Rather the high number here means that we should be able to
+            // keep a lot of idle cpus busy. By ensuring that no codegen
+            // unit takes *too* long to build we'll be guaranteed that all
+            // cpus will finish pretty closely to one another and we should
+            // make relatively optimal use of system resources
+            config::OptLevel::No => 16,
+
+            // All other optimization levels default use one codegen unit,
+            // the historical default in Rust for a Long Time.
+            _ => 1,
+        }
+    }
 }
 
 pub fn build_session(sopts: config::Options,
@@ -804,24 +841,24 @@ pub fn build_session_(sopts: config::Options,
 /// Holds data on the current incremental compilation session, if there is one.
 #[derive(Debug)]
 pub enum IncrCompSession {
-    // This is the state the session will be in until the incr. comp. dir is
-    // needed.
+    /// This is the state the session will be in until the incr. comp. dir is
+    /// needed.
     NotInitialized,
-    // This is the state during which the session directory is private and can
-    // be modified.
+    /// This is the state during which the session directory is private and can
+    /// be modified.
     Active {
         session_directory: PathBuf,
         lock_file: flock::Lock,
         load_dep_graph: bool,
     },
-    // This is the state after the session directory has been finalized. In this
-    // state, the contents of the directory must not be modified any more.
+    /// This is the state after the session directory has been finalized. In this
+    /// state, the contents of the directory must not be modified any more.
     Finalized {
         session_directory: PathBuf,
     },
-    // This is an error state that is reached when some compilation error has
-    // occurred. It indicates that the contents of the session directory must
-    // not be used, since they might be invalid.
+    /// This is an error state that is reached when some compilation error has
+    /// occurred. It indicates that the contents of the session directory must
+    /// not be used, since they might be invalid.
     InvalidBecauseOfErrors {
         session_directory: PathBuf,
     }
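As a standalone illustration of the precedence the new `Session::codegen_units` encodes (an explicit `-C codegen-units=N` wins, then a target-specified default, then 16 at `-O0` and 1 otherwise), here is a hedged sketch using simplified stand-in types rather than rustc's real `Session`:

```rust
enum OptLevel { No, Default, Aggressive }

// Mirror of the decision order above; field names are simplified stand-ins.
fn codegen_units(cli_forced: Option<usize>,
                 target_default: Option<usize>,
                 opt_level: OptLevel) -> usize {
    if let Some(n) = cli_forced {
        return n;
    }
    if let Some(n) = target_default {
        return n;
    }
    match opt_level {
        OptLevel::No => 16, // debug builds favour parallelism over inlining
        _ => 1,             // optimized builds keep one unit for best codegen
    }
}

fn main() {
    assert_eq!(codegen_units(Some(4), None, OptLevel::Aggressive), 4);
    assert_eq!(codegen_units(None, Some(2), OptLevel::No), 2);
    assert_eq!(codegen_units(None, None, OptLevel::No), 16);
    assert_eq!(codegen_units(None, None, OptLevel::Default), 1);
}
```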
@@ -711,41 +711,105 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
                 }
             }
 
-            OutputTypeParameterMismatch(ref expected_trait_ref, ref actual_trait_ref, _) => {
+            OutputTypeParameterMismatch(ref found_trait_ref, ref expected_trait_ref, _) => {
+                let found_trait_ref = self.resolve_type_vars_if_possible(&*found_trait_ref);
                 let expected_trait_ref = self.resolve_type_vars_if_possible(&*expected_trait_ref);
-                let actual_trait_ref = self.resolve_type_vars_if_possible(&*actual_trait_ref);
-                if actual_trait_ref.self_ty().references_error() {
+                if expected_trait_ref.self_ty().references_error() {
                     return;
                 }
-                let expected_trait_ty = expected_trait_ref.self_ty();
-                let found_span = expected_trait_ty.ty_to_def_id().and_then(|did| {
+                let found_trait_ty = found_trait_ref.self_ty();
+
+                let found_did = found_trait_ty.ty_to_def_id();
+                let found_span = found_did.and_then(|did| {
                     self.tcx.hir.span_if_local(did)
                 });
 
-                let self_ty_count =
+                let found_ty_count =
+                    match found_trait_ref.skip_binder().substs.type_at(1).sty {
+                        ty::TyTuple(ref tys, _) => tys.len(),
+                        _ => 1,
+                    };
+
+                let (expected_tys, expected_ty_count) =
                     match expected_trait_ref.skip_binder().substs.type_at(1).sty {
-                        ty::TyTuple(ref tys, _) => tys.len(),
-                        _ => 1,
-                    };
-                let arg_ty_count =
-                    match actual_trait_ref.skip_binder().substs.type_at(1).sty {
-                        ty::TyTuple(ref tys, _) => tys.len(),
-                        _ => 1,
+                        ty::TyTuple(ref tys, _) =>
+                            (tys.iter().map(|t| &t.sty).collect(), tys.len()),
+                        ref sty => (vec![sty], 1),
                     };
-                if self_ty_count == arg_ty_count {
+
+                if found_ty_count == expected_ty_count {
                     self.report_closure_arg_mismatch(span,
                                                      found_span,
-                                                     expected_trait_ref,
-                                                     actual_trait_ref)
+                                                     found_trait_ref,
+                                                     expected_trait_ref)
                 } else {
-                    // Expected `|| { }`, found `|x, y| { }`
-                    // Expected `fn(x) -> ()`, found `|| { }`
+                    let expected_tuple = if expected_ty_count == 1 {
+                        expected_tys.first().and_then(|t| {
+                            if let &&ty::TyTuple(ref tuptys, _) = t {
+                                Some(tuptys.len())
+                            } else {
+                                None
+                            }
+                        })
+                    } else {
+                        None
+                    };
+
+                    // FIXME(#44150): Expand this to "N args expected but a N-tuple found."
+                    // Type of the 1st expected argument is somehow provided as type of a
+                    // found one in that case.
+                    //
+                    // ```
+                    // [1i32, 2, 3].sort_by(|(a, b)| ..)
+                    // // ^^^^^^^^
+                    // // expected_trait_ref: std::ops::FnMut<(&i32, &i32)>
+                    // // found_trait_ref: std::ops::FnMut<(&i32,)>
+                    // ```
+
+                    let (closure_span, closure_args) = found_did
+                        .and_then(|did| self.tcx.hir.get_if_local(did))
+                        .and_then(|node| {
+                            if let hir::map::NodeExpr(
+                                &hir::Expr {
+                                    node: hir::ExprClosure(_, ref decl, id, span, _),
+                                    ..
+                                }) = node
+                            {
+                                let ty_snips = decl.inputs.iter()
+                                    .map(|ty| {
+                                        self.tcx.sess.codemap().span_to_snippet(ty.span).ok()
+                                            .and_then(|snip| {
+                                                // filter out dummy spans
+                                                if snip == "," || snip == "|" {
+                                                    None
+                                                } else {
+                                                    Some(snip)
+                                                }
+                                            })
+                                    })
+                                    .collect::<Vec<Option<String>>>();
+
+                                let body = self.tcx.hir.body(id);
+                                let pat_snips = body.arguments.iter()
+                                    .map(|arg|
+                                        self.tcx.sess.codemap().span_to_snippet(arg.pat.span).ok())
+                                    .collect::<Option<Vec<String>>>();
+
+                                Some((span, pat_snips, ty_snips))
+                            } else {
+                                None
+                            }
+                        })
+                        .map(|(span, pat, ty)| (Some(span), Some((pat, ty))))
+                        .unwrap_or((None, None));
+                    let closure_args = closure_args.and_then(|(pat, ty)| Some((pat?, ty)));
+
                     self.report_arg_count_mismatch(
                         span,
-                        found_span,
-                        arg_ty_count,
-                        self_ty_count,
-                        expected_trait_ty.is_closure()
+                        closure_span.or(found_span),
+                        expected_ty_count,
+                        expected_tuple,
+                        found_ty_count,
+                        closure_args,
+                        found_trait_ty.is_closure()
                     )
                 }
             }
@@ -767,32 +831,97 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
         err.emit();
     }
 
-    fn report_arg_count_mismatch(&self,
-                                 span: Span,
-                                 found_span: Option<Span>,
-                                 expected: usize,
-                                 found: usize,
-                                 is_closure: bool)
-                                 -> DiagnosticBuilder<'tcx>
-    {
-        let mut err = struct_span_err!(self.tcx.sess, span, E0593,
-            "{} takes {} argument{} but {} argument{} {} required",
-            if is_closure { "closure" } else { "function" },
-            found,
-            if found == 1 { "" } else { "s" },
-            expected,
-            if expected == 1 { "" } else { "s" },
-            if expected == 1 { "is" } else { "are" });
-
-        err.span_label(span, format!("expected {} that takes {} argument{}",
-                                     if is_closure { "closure" } else { "function" },
-                                     expected,
-                                     if expected == 1 { "" } else { "s" }));
-        if let Some(span) = found_span {
-            err.span_label(span, format!("takes {} argument{}",
-                                         found,
-                                         if found == 1 { "" } else { "s" }));
+    fn report_arg_count_mismatch(
+        &self,
+        span: Span,
+        found_span: Option<Span>,
+        expected: usize,
+        expected_tuple: Option<usize>,
+        found: usize,
+        closure_args: Option<(Vec<String>, Vec<Option<String>>)>,
+        is_closure: bool
+    ) -> DiagnosticBuilder<'tcx> {
+        use std::borrow::Cow;
+
+        let kind = if is_closure { "closure" } else { "function" };
+
+        let args_str = |n, distinct| format!(
+            "{} {}argument{}",
+            n,
+            if distinct && n >= 2 { "distinct " } else { "" },
+            if n == 1 { "" } else { "s" },
+        );
+
+        let expected_str = if let Some(n) = expected_tuple {
+            assert!(expected == 1);
+            if closure_args.as_ref().map(|&(ref pats, _)| pats.len()) == Some(n) {
+                Cow::from("a single tuple as argument")
+            } else {
+                // be verbose when numbers differ
+                Cow::from(format!("a single {}-tuple as argument", n))
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
Cow::from(args_str(expected, false))
|
||||||
|
};
|
||||||
|
|
||||||
|
let found_str = if expected_tuple.is_some() {
|
||||||
|
args_str(found, true)
|
||||||
|
} else {
|
||||||
|
args_str(found, false)
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
let mut err = struct_span_err!(self.tcx.sess, span, E0593,
|
||||||
|
"{} is expected to take {}, but it takes {}",
|
||||||
|
kind,
|
||||||
|
expected_str,
|
||||||
|
found_str,
|
||||||
|
);
|
||||||
|
|
||||||
|
err.span_label(
|
||||||
|
span,
|
||||||
|
format!(
|
||||||
|
"expected {} that takes {}",
|
||||||
|
kind,
|
||||||
|
expected_str,
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Some(span) = found_span {
|
||||||
|
if let (Some(expected_tuple), Some((pats, tys))) = (expected_tuple, closure_args) {
|
||||||
|
if expected_tuple != found || pats.len() != found {
|
||||||
|
err.span_label(span, format!("takes {}", found_str));
|
||||||
|
} else {
|
||||||
|
let sugg = format!(
|
||||||
|
"|({}){}|",
|
||||||
|
pats.join(", "),
|
||||||
|
|
||||||
|
// add type annotations if available
|
||||||
|
if tys.iter().any(|ty| ty.is_some()) {
|
||||||
|
Cow::from(format!(
|
||||||
|
": ({})",
|
||||||
|
tys.into_iter().map(|ty| if let Some(ty) = ty {
|
||||||
|
ty
|
||||||
|
} else {
|
||||||
|
"_".to_string()
|
||||||
|
}).collect::<Vec<String>>().join(", ")
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
Cow::from("")
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
err.span_suggestion(
|
||||||
|
span,
|
||||||
|
"consider changing the closure to accept a tuple",
|
||||||
|
sugg
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
err.span_label(span, format!("takes {}", found_str));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
err
|
err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
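As a quick illustration of the diagnostic reworked in the hunks above, here is a minimal (intentionally non-compiling) program that hits the closure/tuple argument-count mismatch; the exact wording of the E0593 output shown in the comments is an assumption and may differ slightly from what the compiler prints.

```rust
fn main() {
    let pairs = vec![(1, 2), (3, 4)];
    // error[E0593]: closure is expected to take a single 2-tuple as argument,
    //               but it takes 2 distinct arguments
    // help: consider changing the closure to accept a tuple: `|(a, b)| a + b`
    let sums: Vec<i32> = pairs.into_iter().map(|a, b| a + b).collect();
    println!("{:?}", sums);
}
```

With the suggested `|(a, b)| a + b` pattern the closure takes the single tuple that `map` actually passes, and the program compiles.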
|
@ -650,13 +650,14 @@ pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
/// Given a trait `trait_ref`, iterates the vtable entries
|
/// Given a trait `trait_ref`, iterates the vtable entries
|
||||||
/// that come from `trait_ref`, including its supertraits.
|
/// that come from `trait_ref`, including its supertraits.
|
||||||
#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
|
#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
|
||||||
pub fn get_vtable_methods<'a, 'tcx>(
|
fn vtable_methods<'a, 'tcx>(
|
||||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
trait_ref: ty::PolyTraitRef<'tcx>)
|
trait_ref: ty::PolyTraitRef<'tcx>)
|
||||||
-> impl Iterator<Item=Option<(DefId, &'tcx Substs<'tcx>)>> + 'a
|
-> Rc<Vec<Option<(DefId, &'tcx Substs<'tcx>)>>>
|
||||||
{
|
{
|
||||||
debug!("get_vtable_methods({:?})", trait_ref);
|
debug!("vtable_methods({:?})", trait_ref);
|
||||||
|
|
||||||
|
Rc::new(
|
||||||
supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
|
supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
|
||||||
let trait_methods = tcx.associated_items(trait_ref.def_id())
|
let trait_methods = tcx.associated_items(trait_ref.def_id())
|
||||||
.filter(|item| item.kind == ty::AssociatedKind::Method);
|
.filter(|item| item.kind == ty::AssociatedKind::Method);
|
||||||
|
@ -664,12 +665,12 @@ pub fn get_vtable_methods<'a, 'tcx>(
|
||||||
// Now list each method's DefId and Substs (for within its trait).
|
// Now list each method's DefId and Substs (for within its trait).
|
||||||
// If the method can never be called from this object, produce None.
|
// If the method can never be called from this object, produce None.
|
||||||
trait_methods.map(move |trait_method| {
|
trait_methods.map(move |trait_method| {
|
||||||
debug!("get_vtable_methods: trait_method={:?}", trait_method);
|
debug!("vtable_methods: trait_method={:?}", trait_method);
|
||||||
let def_id = trait_method.def_id;
|
let def_id = trait_method.def_id;
|
||||||
|
|
||||||
// Some methods cannot be called on an object; skip those.
|
// Some methods cannot be called on an object; skip those.
|
||||||
if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) {
|
if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) {
|
||||||
debug!("get_vtable_methods: not vtable safe");
|
debug!("vtable_methods: not vtable safe");
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -690,13 +691,14 @@ pub fn get_vtable_methods<'a, 'tcx>(
|
||||||
// do not want to try and trans it, in that case (see #23435).
|
// do not want to try and trans it, in that case (see #23435).
|
||||||
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
|
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
|
||||||
if !normalize_and_test_predicates(tcx, predicates.predicates) {
|
if !normalize_and_test_predicates(tcx, predicates.predicates) {
|
||||||
debug!("get_vtable_methods: predicates do not hold");
|
debug!("vtable_methods: predicates do not hold");
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some((def_id, substs))
|
Some((def_id, substs))
|
||||||
})
|
})
|
||||||
})
|
}).collect()
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
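For readers unfamiliar with what `vtable_methods` collects, here is a small, hedged illustration (trait and method names invented for the example) of which methods can occupy vtable slots; a method that is not vtable-safe corresponds to a `None` entry in the vector produced above.

```rust
trait Base {
    fn base(&self);
}

trait Sub: Base {
    fn object_safe(&self);
    // Cannot be called through a trait object, so its vtable slot is skipped
    // (reported as `None` by the query above).
    fn not_object_safe()
    where
        Self: Sized;
}

struct S;
impl Base for S { fn base(&self) {} }
impl Sub for S {
    fn object_safe(&self) {}
    fn not_object_safe() {}
}

fn dispatch(x: &Sub) {
    // Dynamic dispatch through the vtable: it carries entries for `Base::base`
    // and `Sub::object_safe`, gathered from the trait and its supertrait.
    x.base();
    x.object_safe();
}

fn main() {
    dispatch(&S);
}
```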
|
||||||
impl<'tcx,O> Obligation<'tcx,O> {
|
impl<'tcx,O> Obligation<'tcx,O> {
|
||||||
|
@ -835,6 +837,8 @@ pub fn provide(providers: &mut ty::maps::Providers) {
|
||||||
is_object_safe: object_safety::is_object_safe_provider,
|
is_object_safe: object_safety::is_object_safe_provider,
|
||||||
specialization_graph_of: specialize::specialization_graph_provider,
|
specialization_graph_of: specialize::specialization_graph_provider,
|
||||||
specializes: specialize::specializes,
|
specializes: specialize::specializes,
|
||||||
|
trans_fulfill_obligation: trans::trans_fulfill_obligation,
|
||||||
|
vtable_methods,
|
||||||
..*providers
|
..*providers
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -844,6 +848,8 @@ pub fn provide_extern(providers: &mut ty::maps::Providers) {
|
||||||
is_object_safe: object_safety::is_object_safe_provider,
|
is_object_safe: object_safety::is_object_safe_provider,
|
||||||
specialization_graph_of: specialize::specialization_graph_provider,
|
specialization_graph_of: specialize::specialization_graph_provider,
|
||||||
specializes: specialize::specializes,
|
specializes: specialize::specializes,
|
||||||
|
trans_fulfill_obligation: trans::trans_fulfill_obligation,
|
||||||
|
vtable_methods,
|
||||||
..*providers
|
..*providers
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,41 +17,36 @@ use dep_graph::{DepGraph, DepKind, DepTrackingMap, DepTrackingMapConfig};
|
||||||
use infer::TransNormalize;
|
use infer::TransNormalize;
|
||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use syntax::ast;
|
use syntax_pos::DUMMY_SP;
|
||||||
use syntax_pos::Span;
|
|
||||||
use traits::{FulfillmentContext, Obligation, ObligationCause, SelectionContext, Vtable};
|
use traits::{FulfillmentContext, Obligation, ObligationCause, SelectionContext, Vtable};
|
||||||
use ty::{self, Ty, TyCtxt};
|
use ty::{self, Ty, TyCtxt};
|
||||||
use ty::subst::{Subst, Substs};
|
use ty::subst::{Subst, Substs};
|
||||||
use ty::fold::{TypeFoldable, TypeFolder};
|
use ty::fold::{TypeFoldable, TypeFolder};
|
||||||
use util::common::MemoizationMap;
|
use util::common::MemoizationMap;
|
||||||
|
|
||||||
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
/// Attempts to resolve an obligation to a vtable. The result is
|
||||||
/// Attempts to resolve an obligation to a vtable. The result is
|
/// a shallow vtable resolution -- meaning that we do not
|
||||||
/// a shallow vtable resolution -- meaning that we do not
|
/// (necessarily) resolve all nested obligations on the impl. Note
|
||||||
/// (necessarily) resolve all nested obligations on the impl. Note
|
/// that type check should guarantee to us that all nested
|
||||||
/// that type check should guarantee to us that all nested
|
/// obligations *could be* resolved if we wanted to.
|
||||||
/// obligations *could be* resolved if we wanted to.
|
/// Assumes that this is run after the entire crate has been successfully type-checked.
|
||||||
/// Assumes that this is run after the entire crate has been successfully type-checked.
|
pub fn trans_fulfill_obligation<'a, 'tcx>(ty: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
pub fn trans_fulfill_obligation(self,
|
(param_env, trait_ref):
|
||||||
span: Span,
|
(ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>))
|
||||||
param_env: ty::ParamEnv<'tcx>,
|
|
||||||
trait_ref: ty::PolyTraitRef<'tcx>)
|
|
||||||
-> Vtable<'tcx, ()>
|
-> Vtable<'tcx, ()>
|
||||||
{
|
{
|
||||||
// Remove any references to regions; this helps improve caching.
|
// Remove any references to regions; this helps improve caching.
|
||||||
let trait_ref = self.erase_regions(&trait_ref);
|
let trait_ref = ty.erase_regions(&trait_ref);
|
||||||
|
|
||||||
self.trans_trait_caches.trait_cache.memoize((param_env, trait_ref), || {
|
|
||||||
debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
|
debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
|
||||||
(param_env, trait_ref), trait_ref.def_id());
|
(param_env, trait_ref), trait_ref.def_id());
|
||||||
|
|
||||||
// Do the initial selection for the obligation. This yields the
|
// Do the initial selection for the obligation. This yields the
|
||||||
// shallow result we are looking for -- that is, what specific impl.
|
// shallow result we are looking for -- that is, what specific impl.
|
||||||
self.infer_ctxt().enter(|infcx| {
|
ty.infer_ctxt().enter(|infcx| {
|
||||||
let mut selcx = SelectionContext::new(&infcx);
|
let mut selcx = SelectionContext::new(&infcx);
|
||||||
|
|
||||||
let obligation_cause = ObligationCause::misc(span,
|
let obligation_cause = ObligationCause::dummy();
|
||||||
ast::DUMMY_NODE_ID);
|
|
||||||
let obligation = Obligation::new(obligation_cause,
|
let obligation = Obligation::new(obligation_cause,
|
||||||
param_env,
|
param_env,
|
||||||
trait_ref.to_poly_trait_predicate());
|
trait_ref.to_poly_trait_predicate());
|
||||||
|
@ -65,15 +60,12 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||||
// leading to an ambiguous result. So report this as an
|
// leading to an ambiguous result. So report this as an
|
||||||
// overflow bug, since I believe this is the only case
|
// overflow bug, since I believe this is the only case
|
||||||
// where ambiguity can result.
|
// where ambiguity can result.
|
||||||
debug!("Encountered ambiguity selecting `{:?}` during trans, \
|
bug!("Encountered ambiguity selecting `{:?}` during trans, \
|
||||||
presuming due to overflow",
|
presuming due to overflow",
|
||||||
trait_ref);
|
trait_ref)
|
||||||
self.sess.span_fatal(span,
|
|
||||||
"reached the recursion limit during monomorphization \
|
|
||||||
(selection ambiguity)");
|
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
|
bug!("Encountered error `{:?}` selecting `{:?}` during trans",
|
||||||
e, trait_ref)
|
e, trait_ref)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -88,14 +80,14 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||||
debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
|
debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
|
||||||
fulfill_cx.register_predicate_obligation(&infcx, predicate);
|
fulfill_cx.register_predicate_obligation(&infcx, predicate);
|
||||||
});
|
});
|
||||||
let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
|
let vtable = infcx.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &vtable);
|
||||||
|
|
||||||
info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
|
info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
|
||||||
vtable
|
vtable
|
||||||
})
|
})
|
||||||
})
|
}
|
||||||
}
|
|
||||||
|
|
||||||
|
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||||
/// Monomorphizes a type from the AST by first applying the in-scope
|
/// Monomorphizes a type from the AST by first applying the in-scope
|
||||||
/// substitutions and then normalizing any associated types.
|
/// substitutions and then normalizing any associated types.
|
||||||
pub fn trans_apply_param_substs<T>(self,
|
pub fn trans_apply_param_substs<T>(self,
|
||||||
|
@ -149,14 +141,12 @@ impl<'a, 'gcx> TypeFolder<'gcx, 'gcx> for AssociatedTypeNormalizer<'a, 'gcx> {
|
||||||
/// Specializes caches used in trans -- in particular, they assume all
|
/// Specializes caches used in trans -- in particular, they assume all
|
||||||
/// types are fully monomorphized and that free regions can be erased.
|
/// types are fully monomorphized and that free regions can be erased.
|
||||||
pub struct TransTraitCaches<'tcx> {
|
pub struct TransTraitCaches<'tcx> {
|
||||||
trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
|
|
||||||
project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>,
|
project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tcx> TransTraitCaches<'tcx> {
|
impl<'tcx> TransTraitCaches<'tcx> {
|
||||||
pub fn new(graph: DepGraph) -> Self {
|
pub fn new(graph: DepGraph) -> Self {
|
||||||
TransTraitCaches {
|
TransTraitCaches {
|
||||||
trait_cache: RefCell::new(DepTrackingMap::new(graph.clone())),
|
|
||||||
project_cache: RefCell::new(DepTrackingMap::new(graph)),
|
project_cache: RefCell::new(DepTrackingMap::new(graph)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -898,11 +898,6 @@ pub struct GlobalCtxt<'tcx> {
|
||||||
|
|
||||||
pub inhabitedness_cache: RefCell<FxHashMap<Ty<'tcx>, DefIdForest>>,
|
pub inhabitedness_cache: RefCell<FxHashMap<Ty<'tcx>, DefIdForest>>,
|
||||||
|
|
||||||
/// Set of nodes which mark locals as mutable which end up getting used at
|
|
||||||
/// some point. Local variable definitions not in this set can be warned
|
|
||||||
/// about.
|
|
||||||
pub used_mut_nodes: RefCell<NodeSet>,
|
|
||||||
|
|
||||||
/// Caches the results of trait selection. This cache is used
|
/// Caches the results of trait selection. This cache is used
|
||||||
/// for things that do not have to do with the parameters in scope.
|
/// for things that do not have to do with the parameters in scope.
|
||||||
pub selection_cache: traits::SelectionCache<'tcx>,
|
pub selection_cache: traits::SelectionCache<'tcx>,
|
||||||
|
@ -1185,7 +1180,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||||
rcache: RefCell::new(FxHashMap()),
|
rcache: RefCell::new(FxHashMap()),
|
||||||
normalized_cache: RefCell::new(FxHashMap()),
|
normalized_cache: RefCell::new(FxHashMap()),
|
||||||
inhabitedness_cache: RefCell::new(FxHashMap()),
|
inhabitedness_cache: RefCell::new(FxHashMap()),
|
||||||
used_mut_nodes: RefCell::new(NodeSet()),
|
|
||||||
selection_cache: traits::SelectionCache::new(),
|
selection_cache: traits::SelectionCache::new(),
|
||||||
evaluation_cache: traits::EvaluationCache::new(),
|
evaluation_cache: traits::EvaluationCache::new(),
|
||||||
rvalue_promotable_to_static: RefCell::new(NodeMap()),
|
rvalue_promotable_to_static: RefCell::new(NodeMap()),
|
||||||
|
|
|
@ -13,7 +13,6 @@ use ty::{self, Ty, TypeFoldable, Substs, TyCtxt};
|
||||||
use ty::subst::{Kind, Subst};
|
use ty::subst::{Kind, Subst};
|
||||||
use traits;
|
use traits;
|
||||||
use syntax::abi::Abi;
|
use syntax::abi::Abi;
|
||||||
use syntax::codemap::DUMMY_SP;
|
|
||||||
use util::ppaux;
|
use util::ppaux;
|
||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
@ -212,7 +211,7 @@ fn resolve_associated_item<'a, 'tcx>(
|
||||||
def_id, trait_id, rcvr_substs);
|
def_id, trait_id, rcvr_substs);
|
||||||
|
|
||||||
let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
|
let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
|
||||||
let vtbl = tcx.trans_fulfill_obligation(DUMMY_SP, param_env, ty::Binder(trait_ref));
|
let vtbl = tcx.trans_fulfill_obligation((param_env, ty::Binder(trait_ref)));
|
||||||
|
|
||||||
// Now that we know which impl is being used, we can dispatch to
|
// Now that we know which impl is being used, we can dispatch to
|
||||||
// the actual function:
|
// the actual function:
|
||||||
|
|
|
@ -221,6 +221,12 @@ impl<'tcx> QueryDescription for queries::is_mir_available<'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'tcx> QueryDescription for queries::trans_fulfill_obligation<'tcx> {
|
||||||
|
fn describe(tcx: TyCtxt, key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> String {
|
||||||
|
format!("checking if `{}` fulfills its obligations", tcx.item_path_str(key.1.def_id()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
|
impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
|
||||||
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
|
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
|
||||||
format!("trait impls of `{}`", tcx.item_path_str(def_id))
|
format!("trait impls of `{}`", tcx.item_path_str(def_id))
|
||||||
|
@ -497,6 +503,12 @@ impl<'tcx> QueryDescription for queries::has_clone_closures<'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'tcx> QueryDescription for queries::vtable_methods<'tcx> {
|
||||||
|
fn describe(tcx: TyCtxt, key: ty::PolyTraitRef<'tcx> ) -> String {
|
||||||
|
format!("finding all methods for trait {}", tcx.item_path_str(key.def_id()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'tcx> QueryDescription for queries::has_copy_closures<'tcx> {
|
impl<'tcx> QueryDescription for queries::has_copy_closures<'tcx> {
|
||||||
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
|
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
|
||||||
format!("seeing if the crate has enabled `Copy` closures")
|
format!("seeing if the crate has enabled `Copy` closures")
|
||||||
|
|
|
@ -134,6 +134,24 @@ impl Key for (MirSuite, MirPassIndex, DefId) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'tcx> Key for (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>) {
|
||||||
|
fn map_crate(&self) -> CrateNum {
|
||||||
|
self.1.def_id().krate
|
||||||
|
}
|
||||||
|
fn default_span(&self, tcx: TyCtxt) -> Span {
|
||||||
|
tcx.def_span(self.1.def_id())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'tcx> Key for ty::PolyTraitRef<'tcx>{
|
||||||
|
fn map_crate(&self) -> CrateNum {
|
||||||
|
self.def_id().krate
|
||||||
|
}
|
||||||
|
fn default_span(&self, tcx: TyCtxt) -> Span {
|
||||||
|
tcx.def_span(self.def_id())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'tcx> Key for Ty<'tcx> {
|
impl<'tcx> Key for Ty<'tcx> {
|
||||||
fn map_crate(&self) -> CrateNum {
|
fn map_crate(&self) -> CrateNum {
|
||||||
LOCAL_CRATE
|
LOCAL_CRATE
|
||||||
|
|
|
@ -15,6 +15,7 @@ use hir::def::{Def, Export};
|
||||||
use hir::{self, TraitCandidate, ItemLocalId};
|
use hir::{self, TraitCandidate, ItemLocalId};
|
||||||
use hir::svh::Svh;
|
use hir::svh::Svh;
|
||||||
use lint;
|
use lint;
|
||||||
|
use middle::borrowck::BorrowCheckResult;
|
||||||
use middle::const_val;
|
use middle::const_val;
|
||||||
use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary,
|
use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary,
|
||||||
ExternBodyNestedBodies};
|
ExternBodyNestedBodies};
|
||||||
|
@ -30,6 +31,7 @@ use middle::trans::{CodegenUnit, Stats};
|
||||||
use mir;
|
use mir;
|
||||||
use session::CompileResult;
|
use session::CompileResult;
|
||||||
use session::config::OutputFilenames;
|
use session::config::OutputFilenames;
|
||||||
|
use traits::Vtable;
|
||||||
use traits::specialization_graph;
|
use traits::specialization_graph;
|
||||||
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
|
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
|
||||||
use ty::layout::{Layout, LayoutError};
|
use ty::layout::{Layout, LayoutError};
|
||||||
|
@ -182,7 +184,7 @@ define_maps! { <'tcx>
|
||||||
|
|
||||||
[] fn coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
|
[] fn coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
|
||||||
|
|
||||||
[] fn borrowck: BorrowCheck(DefId) -> (),
|
[] fn borrowck: BorrowCheck(DefId) -> Rc<BorrowCheckResult>,
|
||||||
// FIXME: shouldn't this return a `Result<(), BorrowckErrors>` instead?
|
// FIXME: shouldn't this return a `Result<(), BorrowckErrors>` instead?
|
||||||
[] fn mir_borrowck: MirBorrowCheck(DefId) -> (),
|
[] fn mir_borrowck: MirBorrowCheck(DefId) -> (),
|
||||||
|
|
||||||
|
@ -227,7 +229,11 @@ define_maps! { <'tcx>
|
||||||
[] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> ExternBodyNestedBodies,
|
[] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> ExternBodyNestedBodies,
|
||||||
[] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
|
[] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
|
||||||
[] fn is_mir_available: IsMirAvailable(DefId) -> bool,
|
[] fn is_mir_available: IsMirAvailable(DefId) -> bool,
|
||||||
|
[] fn vtable_methods: vtable_methods_node(ty::PolyTraitRef<'tcx>)
|
||||||
|
-> Rc<Vec<Option<(DefId, &'tcx Substs<'tcx>)>>>,
|
||||||
|
|
||||||
|
[] fn trans_fulfill_obligation: fulfill_obligation_dep_node(
|
||||||
|
(ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> Vtable<'tcx, ()>,
|
||||||
[] fn trait_impls_of: TraitImpls(DefId) -> Rc<ty::trait_def::TraitImpls>,
|
[] fn trait_impls_of: TraitImpls(DefId) -> Rc<ty::trait_def::TraitImpls>,
|
||||||
[] fn specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
|
[] fn specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
|
||||||
[] fn is_object_safe: ObjectSafety(DefId) -> bool,
|
[] fn is_object_safe: ObjectSafety(DefId) -> bool,
|
||||||
|
@ -347,6 +353,14 @@ fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstr
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn fulfill_obligation_dep_node<'tcx>((param_env, trait_ref):
|
||||||
|
(ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> DepConstructor<'tcx> {
|
||||||
|
DepConstructor::FulfillObligation {
|
||||||
|
param_env,
|
||||||
|
trait_ref
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn coherent_trait_dep_node<'tcx>((_, def_id): (CrateNum, DefId)) -> DepConstructor<'tcx> {
|
fn coherent_trait_dep_node<'tcx>((_, def_id): (CrateNum, DefId)) -> DepConstructor<'tcx> {
|
||||||
DepConstructor::CoherenceCheckTrait(def_id)
|
DepConstructor::CoherenceCheckTrait(def_id)
|
||||||
}
|
}
|
||||||
|
@ -459,3 +473,7 @@ fn collect_and_partition_translation_items_node<'tcx>(_: CrateNum) -> DepConstru
|
||||||
fn output_filenames_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
|
fn output_filenames_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
|
||||||
DepConstructor::OutputFilenames
|
DepConstructor::OutputFilenames
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn vtable_methods_node<'tcx>(trait_ref: ty::PolyTraitRef<'tcx>) -> DepConstructor<'tcx> {
|
||||||
|
DepConstructor::VtableMethods{ trait_ref }
|
||||||
|
}
|
||||||
|
|
|
@ -344,6 +344,52 @@ macro_rules! define_maps {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Ensure that either this query has all green inputs or has been executed.
|
||||||
|
/// Executing query::ensure(D) is considered a read of the dep-node D.
|
||||||
|
///
|
||||||
|
/// This function is particularly useful when executing passes for their
|
||||||
|
/// side-effects -- e.g., in order to report errors for erroneous programs.
|
||||||
|
///
|
||||||
|
/// Note: The optimization is only available during incr. comp.
|
||||||
|
pub fn ensure(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K) -> () {
|
||||||
|
let dep_node = Self::to_dep_node(tcx, &key);
|
||||||
|
|
||||||
|
// Ensuring an "input" or anonymous query makes no sense
|
||||||
|
assert!(!dep_node.kind.is_anon());
|
||||||
|
assert!(!dep_node.kind.is_input());
|
||||||
|
use dep_graph::DepNodeColor;
|
||||||
|
match tcx.dep_graph.node_color(&dep_node) {
|
||||||
|
Some(DepNodeColor::Green(dep_node_index)) => {
|
||||||
|
tcx.dep_graph.read_index(dep_node_index);
|
||||||
|
}
|
||||||
|
Some(DepNodeColor::Red) => {
|
||||||
|
// A DepNodeColor::Red DepNode means that this query was executed
|
||||||
|
// before. We cannot call `dep_graph.read()` here as we don't have
|
||||||
|
// the DepNodeIndex. Instead, we call the query again to issue the
|
||||||
|
// appropriate `dep_graph.read()` call. The performance cost this
|
||||||
|
// introduces should be negligible as we'll immediately hit the
|
||||||
|
// in-memory cache.
|
||||||
|
let _ = tcx.$name(key);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
// No color recorded yet: try to mark the node green; if that fails, run the query.
|
||||||
|
if !tcx.dep_graph.is_fully_enabled() {
|
||||||
|
let _ = tcx.$name(key);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
match tcx.dep_graph.try_mark_green(tcx, &dep_node) {
|
||||||
|
Some(dep_node_index) => {
|
||||||
|
debug_assert!(tcx.dep_graph.is_green(dep_node_index));
|
||||||
|
tcx.dep_graph.read_index(dep_node_index);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
let _ = tcx.$name(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn compute_result(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K) -> $V {
|
fn compute_result(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K) -> $V {
|
||||||
let provider = tcx.maps.providers[key.map_crate()].$name;
|
let provider = tcx.maps.providers[key.map_crate()].$name;
|
||||||
provider(tcx.global_tcx(), key)
|
provider(tcx.global_tcx(), key)
|
||||||
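A hedged sketch of how a pass might use the new `ensure` entry point; the module path and the choice of the `borrowck` query (defined elsewhere in this diff) are assumptions for illustration, and the snippet is compiler-internal, so it is not runnable on its own.

```rust
// Run borrowck purely for its side effects (error reporting). If all inputs
// are green this only records a read of the dep-node, skipping both the
// recomputation and the clone of the `Rc<BorrowCheckResult>` value.
ty::maps::queries::borrowck::ensure(tcx, body_owner_def_id);
```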
|
@ -468,8 +514,7 @@ macro_rules! define_maps {
|
||||||
|
|
||||||
define_provider_struct! {
|
define_provider_struct! {
|
||||||
tcx: $tcx,
|
tcx: $tcx,
|
||||||
input: ($(([$($modifiers)*] [$name] [$K] [$V]))*),
|
input: ($(([$($modifiers)*] [$name] [$K] [$V]))*)
|
||||||
output: ()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<$tcx> Copy for Providers<$tcx> {}
|
impl<$tcx> Copy for Providers<$tcx> {}
|
||||||
|
@ -480,78 +525,19 @@ macro_rules! define_maps {
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! define_map_struct {
|
macro_rules! define_map_struct {
|
||||||
// Initial state
|
|
||||||
(tcx: $tcx:tt,
|
(tcx: $tcx:tt,
|
||||||
input: $input:tt) => {
|
input: ($(([$(modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
|
||||||
define_map_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
input: $input,
|
|
||||||
output: ()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Final output
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
input: (),
|
|
||||||
output: ($($output:tt)*)) => {
|
|
||||||
pub struct Maps<$tcx> {
|
pub struct Maps<$tcx> {
|
||||||
providers: IndexVec<CrateNum, Providers<$tcx>>,
|
providers: IndexVec<CrateNum, Providers<$tcx>>,
|
||||||
query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
|
query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
|
||||||
$($output)*
|
$($(#[$attr])* $name: RefCell<QueryMap<queries::$name<$tcx>>>,)*
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Field recognized and ready to shift into the output
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
ready: ([$($pub:tt)*] [$($attr:tt)*] [$name:ident]),
|
|
||||||
input: $input:tt,
|
|
||||||
output: ($($output:tt)*)) => {
|
|
||||||
define_map_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
input: $input,
|
|
||||||
output: ($($output)*
|
|
||||||
$(#[$attr])* $($pub)* $name: RefCell<QueryMap<queries::$name<$tcx>>>,)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// No modifiers left? This is a private item.
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
input: (([] $attrs:tt $name:tt) $($input:tt)*),
|
|
||||||
output: $output:tt) => {
|
|
||||||
define_map_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
ready: ([] $attrs $name),
|
|
||||||
input: ($($input)*),
|
|
||||||
output: $output
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Skip other modifiers
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
input: (([$other_modifier:tt $($modifiers:tt)*] $($fields:tt)*) $($input:tt)*),
|
|
||||||
output: $output:tt) => {
|
|
||||||
define_map_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
input: (([$($modifiers)*] $($fields)*) $($input)*),
|
|
||||||
output: $output
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! define_provider_struct {
|
macro_rules! define_provider_struct {
|
||||||
// Initial state:
|
|
||||||
(tcx: $tcx:tt, input: $input:tt) => {
|
|
||||||
define_provider_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
input: $input,
|
|
||||||
output: ()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Final state:
|
|
||||||
(tcx: $tcx:tt,
|
(tcx: $tcx:tt,
|
||||||
input: (),
|
input: ($(([$($modifiers:tt)*] [$name:ident] [$K:ty] [$R:ty]))*)) => {
|
||||||
output: ($(([$name:ident] [$K:ty] [$R:ty]))*)) => {
|
|
||||||
pub struct Providers<$tcx> {
|
pub struct Providers<$tcx> {
|
||||||
$(pub $name: for<'a> fn(TyCtxt<'a, $tcx, $tcx>, $K) -> $R,)*
|
$(pub $name: for<'a> fn(TyCtxt<'a, $tcx, $tcx>, $K) -> $R,)*
|
||||||
}
|
}
|
||||||
|
@ -566,43 +552,51 @@ macro_rules! define_provider_struct {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Something ready to shift:
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
ready: ($name:tt $K:tt $V:tt),
|
|
||||||
input: $input:tt,
|
|
||||||
output: ($($output:tt)*)) => {
|
|
||||||
define_provider_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
input: $input,
|
|
||||||
output: ($($output)* ($name $K $V))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Regular queries produce a `V` only.
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
input: (([] $name:tt $K:tt $V:tt) $($input:tt)*),
|
|
||||||
output: $output:tt) => {
|
|
||||||
define_provider_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
ready: ($name $K $V),
|
|
||||||
input: ($($input)*),
|
|
||||||
output: $output
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Skip modifiers.
|
|
||||||
(tcx: $tcx:tt,
|
|
||||||
input: (([$other_modifier:tt $($modifiers:tt)*] $($fields:tt)*) $($input:tt)*),
|
|
||||||
output: $output:tt) => {
|
|
||||||
define_provider_struct! {
|
|
||||||
tcx: $tcx,
|
|
||||||
input: (([$($modifiers)*] $($fields)*) $($input)*),
|
|
||||||
output: $output
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// The red/green evaluation system will try to mark a specific DepNode in the
|
||||||
|
/// dependency graph as green by recursively trying to mark the dependencies of
|
||||||
|
/// that DepNode as green. While doing so, it will sometimes encounter a DepNode
|
||||||
|
/// where we don't know if it is red or green and we therefore actually have
|
||||||
|
/// to recompute its value in order to find out. Since the only piece of
|
||||||
|
/// information that we have at that point is the DepNode we are trying to
|
||||||
|
/// re-evaluate, we need some way to re-run a query from just that. This is what
|
||||||
|
/// `force_from_dep_node()` implements.
|
||||||
|
///
|
||||||
|
/// In the general case, a DepNode consists of a DepKind and an opaque
|
||||||
|
/// GUID/fingerprint that will uniquely identify the node. This GUID/fingerprint
|
||||||
|
/// is usually constructed by computing a stable hash of the query-key that the
|
||||||
|
/// DepNode corresponds to. Consequently, it is not in general possible to go
|
||||||
|
/// back from hash to query-key (since hash functions are not reversible). For
|
||||||
|
/// this reason `force_from_dep_node()` is expected to fail from time to time
|
||||||
|
/// because we just cannot find out, from the DepNode alone, what the
|
||||||
|
/// corresponding query-key is and therefore cannot re-run the query.
|
||||||
|
///
|
||||||
|
/// The system deals with this case by letting `try_mark_green` fail, which forces
|
||||||
|
/// the root query to be re-evaluated.
|
||||||
|
///
|
||||||
|
/// Now, if force_from_dep_node() would always fail, it would be pretty useless.
|
||||||
|
/// Fortunately, we can use some contextual information that will allow us to
|
||||||
|
/// reconstruct query-keys for certain kinds of DepNodes. In particular, we
|
||||||
|
/// enforce by construction that the GUID/fingerprint of certain DepNodes is a
|
||||||
|
/// valid DefPathHash. Since we also always build a huge table that maps every
|
||||||
|
/// DefPathHash in the current codebase to the corresponding DefId, we have
|
||||||
|
/// everything we need to re-run the query.
|
||||||
|
///
|
||||||
|
/// Take the `mir_validated` query as an example. Like many other queries, it
|
||||||
|
/// just has a single parameter: the DefId of the item it will compute the
|
||||||
|
/// validated MIR for. Now, when we call `force_from_dep_node()` on a dep-node
|
||||||
|
/// with kind `MirValidated`, we know that the GUID/fingerprint of the dep-node
|
||||||
|
/// is actually a DefPathHash, and can therefore just look up the corresponding
|
||||||
|
/// DefId in `tcx.def_path_hash_to_def_id`.
|
||||||
|
///
|
||||||
|
/// When you implement a new query, it will likely have a corresponding new
|
||||||
|
/// DepKind, and you'll have to support it here in `force_from_dep_node()`. As
|
||||||
|
/// a rule of thumb, if your query takes a DefId or DefIndex as sole parameter,
|
||||||
|
/// then `force_from_dep_node()` should not fail for it. Otherwise, you can just
|
||||||
|
/// add it to the "We don't have enough information to reconstruct..." group in
|
||||||
|
/// the match below.
|
||||||
pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>,
|
pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>,
|
||||||
dep_node: &DepNode)
|
dep_node: &DepNode)
|
||||||
-> bool {
|
-> bool {
|
||||||
|
@ -687,16 +681,16 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>,
|
||||||
DepKind::Hir |
|
DepKind::Hir |
|
||||||
|
|
||||||
// These are anonymous nodes
|
// These are anonymous nodes
|
||||||
|
DepKind::TraitSelect |
|
||||||
|
|
||||||
|
// We don't have enough information to reconstruct the query key of
|
||||||
|
// these
|
||||||
DepKind::IsCopy |
|
DepKind::IsCopy |
|
||||||
DepKind::IsSized |
|
DepKind::IsSized |
|
||||||
DepKind::IsFreeze |
|
DepKind::IsFreeze |
|
||||||
DepKind::NeedsDrop |
|
DepKind::NeedsDrop |
|
||||||
DepKind::Layout |
|
DepKind::Layout |
|
||||||
DepKind::TraitSelect |
|
|
||||||
DepKind::ConstEval |
|
DepKind::ConstEval |
|
||||||
|
|
||||||
// We don't have enough information to reconstruct the query key of
|
|
||||||
// these
|
|
||||||
DepKind::InstanceSymbolName |
|
DepKind::InstanceSymbolName |
|
||||||
DepKind::MirShim |
|
DepKind::MirShim |
|
||||||
DepKind::BorrowCheckKrate |
|
DepKind::BorrowCheckKrate |
|
||||||
|
@ -705,6 +699,8 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>,
|
||||||
DepKind::TypeParamPredicates |
|
DepKind::TypeParamPredicates |
|
||||||
DepKind::CodegenUnit |
|
DepKind::CodegenUnit |
|
||||||
DepKind::CompileCodegenUnit |
|
DepKind::CompileCodegenUnit |
|
||||||
|
DepKind::FulfillObligation |
|
||||||
|
DepKind::VtableMethods |
|
||||||
|
|
||||||
// These are just odd
|
// These are just odd
|
||||||
DepKind::Null |
|
DepKind::Null |
|
||||||
|
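The doc comment added to `force_from_dep_node` above describes recovering a query key when the dep-node fingerprint is a `DefPathHash`. Below is a hedged sketch of that pattern, reusing the `mir_validated` example from the doc comment; the `extract_def_id` helper name is an assumption, and the code is compiler-internal rather than standalone.

```rust
// If the fingerprint is really a DefPathHash we can map it back to a DefId
// and re-run the query; otherwise (e.g. a TraitSelect node) we return false
// so that `try_mark_green` fails and the root query is re-evaluated.
if let Some(def_id) = dep_node.extract_def_id(tcx) {
    let _ = tcx.mir_validated(def_id);
    true
} else {
    false
}
```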
|
File diff suppressed because it is too large
|
@ -11,5 +11,4 @@
|
||||||
fn main() {
|
fn main() {
|
||||||
println!("cargo:rerun-if-changed=build.rs");
|
println!("cargo:rerun-if-changed=build.rs");
|
||||||
println!("cargo:rerun-if-env-changed=CFG_DEFAULT_LINKER");
|
println!("cargo:rerun-if-env-changed=CFG_DEFAULT_LINKER");
|
||||||
println!("cargo:rerun-if-env-changed=CFG_DEFAULT_AR");
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,7 +20,6 @@ pub fn target() -> Result<Target, String> {
|
||||||
|
|
||||||
let opts = TargetOptions {
|
let opts = TargetOptions {
|
||||||
linker: cmd("emcc"),
|
linker: cmd("emcc"),
|
||||||
ar: cmd("emar"),
|
|
||||||
|
|
||||||
dynamic_linking: false,
|
dynamic_linking: false,
|
||||||
executables: true,
|
executables: true,
|
||||||
|
|
|
@ -135,6 +135,7 @@ macro_rules! supported_targets {
|
||||||
|
|
||||||
supported_targets! {
|
supported_targets! {
|
||||||
("x86_64-unknown-linux-gnu", x86_64_unknown_linux_gnu),
|
("x86_64-unknown-linux-gnu", x86_64_unknown_linux_gnu),
|
||||||
|
("x86_64-unknown-linux-gnux32", x86_64_unknown_linux_gnux32),
|
||||||
("i686-unknown-linux-gnu", i686_unknown_linux_gnu),
|
("i686-unknown-linux-gnu", i686_unknown_linux_gnu),
|
||||||
("i586-unknown-linux-gnu", i586_unknown_linux_gnu),
|
("i586-unknown-linux-gnu", i586_unknown_linux_gnu),
|
||||||
("mips-unknown-linux-gnu", mips_unknown_linux_gnu),
|
("mips-unknown-linux-gnu", mips_unknown_linux_gnu),
|
||||||
|
@ -268,8 +269,6 @@ pub struct TargetOptions {
|
||||||
|
|
||||||
/// Linker to invoke. Defaults to "cc".
|
/// Linker to invoke. Defaults to "cc".
|
||||||
pub linker: String,
|
pub linker: String,
|
||||||
/// Archive utility to use when managing archives. Defaults to "ar".
|
|
||||||
pub ar: String,
|
|
||||||
|
|
||||||
/// Linker arguments that are unconditionally passed *before* any
|
/// Linker arguments that are unconditionally passed *before* any
|
||||||
/// user-defined libraries.
|
/// user-defined libraries.
|
||||||
|
@ -430,6 +429,9 @@ pub struct TargetOptions {
|
||||||
|
|
||||||
/// The minimum alignment for global symbols.
|
/// The minimum alignment for global symbols.
|
||||||
pub min_global_align: Option<u64>,
|
pub min_global_align: Option<u64>,
|
||||||
|
|
||||||
|
/// Default number of codegen units to use in debug mode
|
||||||
|
pub default_codegen_units: Option<u64>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for TargetOptions {
|
impl Default for TargetOptions {
|
||||||
|
@ -439,7 +441,6 @@ impl Default for TargetOptions {
|
||||||
TargetOptions {
|
TargetOptions {
|
||||||
is_builtin: false,
|
is_builtin: false,
|
||||||
linker: option_env!("CFG_DEFAULT_LINKER").unwrap_or("cc").to_string(),
|
linker: option_env!("CFG_DEFAULT_LINKER").unwrap_or("cc").to_string(),
|
||||||
ar: option_env!("CFG_DEFAULT_AR").unwrap_or("ar").to_string(),
|
|
||||||
pre_link_args: LinkArgs::new(),
|
pre_link_args: LinkArgs::new(),
|
||||||
post_link_args: LinkArgs::new(),
|
post_link_args: LinkArgs::new(),
|
||||||
asm_args: Vec::new(),
|
asm_args: Vec::new(),
|
||||||
|
@ -492,6 +493,7 @@ impl Default for TargetOptions {
|
||||||
crt_static_respected: false,
|
crt_static_respected: false,
|
||||||
stack_probes: false,
|
stack_probes: false,
|
||||||
min_global_align: None,
|
min_global_align: None,
|
||||||
|
default_codegen_units: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -680,7 +682,6 @@ impl Target {
|
||||||
|
|
||||||
key!(is_builtin, bool);
|
key!(is_builtin, bool);
|
||||||
key!(linker);
|
key!(linker);
|
||||||
key!(ar);
|
|
||||||
key!(pre_link_args, link_args);
|
key!(pre_link_args, link_args);
|
||||||
key!(pre_link_objects_exe, list);
|
key!(pre_link_objects_exe, list);
|
||||||
key!(pre_link_objects_dll, list);
|
key!(pre_link_objects_dll, list);
|
||||||
|
@ -732,6 +733,7 @@ impl Target {
|
||||||
key!(crt_static_respected, bool);
|
key!(crt_static_respected, bool);
|
||||||
key!(stack_probes, bool);
|
key!(stack_probes, bool);
|
||||||
key!(min_global_align, Option<u64>);
|
key!(min_global_align, Option<u64>);
|
||||||
|
key!(default_codegen_units, Option<u64>);
|
||||||
|
|
||||||
if let Some(array) = obj.find("abi-blacklist").and_then(Json::as_array) {
|
if let Some(array) = obj.find("abi-blacklist").and_then(Json::as_array) {
|
||||||
for name in array.iter().filter_map(|abi| abi.as_string()) {
|
for name in array.iter().filter_map(|abi| abi.as_string()) {
|
||||||
|
@ -872,7 +874,6 @@ impl ToJson for Target {
|
||||||
|
|
||||||
target_option_val!(is_builtin);
|
target_option_val!(is_builtin);
|
||||||
target_option_val!(linker);
|
target_option_val!(linker);
|
||||||
target_option_val!(ar);
|
|
||||||
target_option_val!(link_args - pre_link_args);
|
target_option_val!(link_args - pre_link_args);
|
||||||
target_option_val!(pre_link_objects_exe);
|
target_option_val!(pre_link_objects_exe);
|
||||||
target_option_val!(pre_link_objects_dll);
|
target_option_val!(pre_link_objects_dll);
|
||||||
|
@ -924,6 +925,7 @@ impl ToJson for Target {
|
||||||
target_option_val!(crt_static_respected);
|
target_option_val!(crt_static_respected);
|
||||||
target_option_val!(stack_probes);
|
target_option_val!(stack_probes);
|
||||||
target_option_val!(min_global_align);
|
target_option_val!(min_global_align);
|
||||||
|
target_option_val!(default_codegen_units);
|
||||||
|
|
||||||
if default.abi_blacklist != self.options.abi_blacklist {
|
if default.abi_blacklist != self.options.abi_blacklist {
|
||||||
d.insert("abi-blacklist".to_string(), self.options.abi_blacklist.iter()
|
d.insert("abi-blacklist".to_string(), self.options.abi_blacklist.iter()
|
||||||
|
|
|
@ -48,6 +48,11 @@ pub fn target() -> TargetResult {
|
||||||
// code because of the extra costs it involves.
|
// code because of the extra costs it involves.
|
||||||
relocation_model: "static".to_string(),
|
relocation_model: "static".to_string(),
|
||||||
|
|
||||||
|
// Right now we invoke an external assembler and this isn't
|
||||||
|
// compatible with multiple codegen units, and plus we probably
|
||||||
|
// don't want to invoke that many gcc instances.
|
||||||
|
default_codegen_units: Some(1),
|
||||||
|
|
||||||
.. Default::default( )
|
.. Default::default( )
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
|
@ -25,7 +25,6 @@ pub fn target() -> Result<Target, String> {
|
||||||
|
|
||||||
let opts = TargetOptions {
|
let opts = TargetOptions {
|
||||||
linker: cmd("emcc"),
|
linker: cmd("emcc"),
|
||||||
ar: cmd("emar"),
|
|
||||||
|
|
||||||
dynamic_linking: false,
|
dynamic_linking: false,
|
||||||
executables: true,
|
executables: true,
|
||||||
|
|
|
@ -22,7 +22,6 @@ pub fn target() -> Result<Target, String> {
|
||||||
|
|
||||||
let opts = TargetOptions {
|
let opts = TargetOptions {
|
||||||
linker: cmd("emcc"),
|
linker: cmd("emcc"),
|
||||||
ar: cmd("emar"),
|
|
||||||
|
|
||||||
dynamic_linking: false,
|
dynamic_linking: false,
|
||||||
executables: true,
|
executables: true,
|
||||||
|
|
|
@ -21,37 +21,6 @@ pub fn opts() -> TargetOptions {
|
||||||
TargetOptions {
|
TargetOptions {
|
||||||
function_sections: true,
|
function_sections: true,
|
||||||
linker: "link.exe".to_string(),
|
linker: "link.exe".to_string(),
|
||||||
// When taking a look at the value of this `ar` field, one might expect
|
|
||||||
// `lib.exe` to be the value here! The `lib.exe` program is the default
|
|
||||||
// tool for managing `.lib` archives on Windows, but unfortunately the
|
|
||||||
// compiler cannot use it.
|
|
||||||
//
|
|
||||||
// To recap, we use `ar` here to manage rlibs (which are just archives).
|
|
||||||
// LLVM does not expose bindings for modifying archives so we have to
|
|
||||||
// invoke this utility for write operations (e.g. deleting files, adding
|
|
||||||
// files, etc). Normally archives only have object files within them,
|
|
||||||
// but the compiler also uses archives for storing metadata and
|
|
||||||
// compressed bytecode, so we don't exactly fall within "normal use
|
|
||||||
// cases".
|
|
||||||
//
|
|
||||||
// MSVC's `lib.exe` tool by default will choke when adding a non-object
|
|
||||||
// file to an archive, which we do on a regular basis, making it
|
|
||||||
// inoperable for us. Luckily, however, LLVM has already rewritten `ar`
|
|
||||||
// in the form of `llvm-ar` which is built by default when we build
|
|
||||||
// LLVM. This tool, unlike `lib.exe`, works just fine with non-object
|
|
||||||
// files, so we use it instead.
|
|
||||||
//
|
|
||||||
// Note that there's a few caveats associated with this:
|
|
||||||
//
|
|
||||||
// * This still requires that the *linker* (the consumer of rlibs) will
|
|
||||||
// ignore non-object files. Thankfully `link.exe` on Windows does
|
|
||||||
// indeed ignore non-object files in archives.
|
|
||||||
// * This requires `llvm-ar.exe` to be distributed with the compiler
|
|
||||||
// itself, but we already make sure of this elsewhere.
|
|
||||||
//
|
|
||||||
// Perhaps one day we won't even need this tool at all and we'll just be
|
|
||||||
// able to make library calls into LLVM!
|
|
||||||
ar: "llvm-ar.exe".to_string(),
|
|
||||||
dynamic_linking: true,
|
dynamic_linking: true,
|
||||||
executables: true,
|
executables: true,
|
||||||
dll_prefix: "".to_string(),
|
dll_prefix: "".to_string(),
|
||||||
|
|
|
@ -16,7 +16,6 @@ pub fn target() -> TargetResult {
|
||||||
base.cpu = "x86-64".to_string();
|
base.cpu = "x86-64".to_string();
|
||||||
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
|
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
|
||||||
base.linker = "x86_64-rumprun-netbsd-gcc".to_string();
|
base.linker = "x86_64-rumprun-netbsd-gcc".to_string();
|
||||||
base.ar = "x86_64-rumprun-netbsd-ar".to_string();
|
|
||||||
base.max_atomic_width = Some(64);
|
base.max_atomic_width = Some(64);
|
||||||
|
|
||||||
base.dynamic_linking = false;
|
base.dynamic_linking = false;
|
||||||
|
|
35
src/librustc_back/target/x86_64_unknown_linux_gnux32.rs
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
use LinkerFlavor;
|
||||||
|
use target::{Target, TargetResult};
|
||||||
|
|
||||||
|
pub fn target() -> TargetResult {
|
||||||
|
let mut base = super::linux_base::opts();
|
||||||
|
base.cpu = "x86-64".to_string();
|
||||||
|
base.max_atomic_width = Some(64);
|
||||||
|
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mx32".to_string());
|
||||||
|
base.stack_probes = true;
|
||||||
|
base.has_elf_tls = false;
|
||||||
|
|
||||||
|
Ok(Target {
|
||||||
|
llvm_target: "x86_64-unknown-linux-gnux32".to_string(),
|
||||||
|
target_endian: "little".to_string(),
|
||||||
|
target_pointer_width: "32".to_string(),
|
||||||
|
target_c_int_width: "32".to_string(),
|
||||||
|
data_layout: "e-m:e-p:32:32-i64:64-f80:128-n8:16:32:64-S128".to_string(),
|
||||||
|
arch: "x86_64".to_string(),
|
||||||
|
target_os: "linux".to_string(),
|
||||||
|
target_env: "gnu".to_string(),
|
||||||
|
target_vendor: "unknown".to_string(),
|
||||||
|
linker_flavor: LinkerFlavor::Gcc,
|
||||||
|
options: base,
|
||||||
|
})
|
||||||
|
}
|
|
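The new `x86_64-unknown-linux-gnux32` target above combines `target_arch = "x86_64"` with a 32-bit pointer width. A small, hedged example of gating ordinary Rust code on that combination; it compiles for any target and simply picks a different message elsewhere.

```rust
#[cfg(all(target_arch = "x86_64", target_pointer_width = "32"))]
fn describe_target() -> &'static str {
    // Matches the x32-style ABI described by the new target spec:
    // 64-bit registers with 32-bit pointers.
    "x86_64 with 32-bit pointers (x32-style ABI)"
}

#[cfg(not(all(target_arch = "x86_64", target_pointer_width = "32")))]
fn describe_target() -> &'static str {
    "some other target"
}

fn main() {
    println!("{}", describe_target());
}
```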
@ -15,5 +15,6 @@ syntax = { path = "../libsyntax" }
|
||||||
syntax_pos = { path = "../libsyntax_pos" }
|
syntax_pos = { path = "../libsyntax_pos" }
|
||||||
graphviz = { path = "../libgraphviz" }
|
graphviz = { path = "../libgraphviz" }
|
||||||
rustc = { path = "../librustc" }
|
rustc = { path = "../librustc" }
|
||||||
|
rustc_back = { path = "../librustc_back" }
|
||||||
rustc_mir = { path = "../librustc_mir" }
|
rustc_mir = { path = "../librustc_mir" }
|
||||||
rustc_errors = { path = "../librustc_errors" }
|
rustc_errors = { path = "../librustc_errors" }
|
||||||
|
|
|
@ -770,7 +770,8 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||||
let lp = opt_loan_path(&assignee_cmt).unwrap();
|
let lp = opt_loan_path(&assignee_cmt).unwrap();
|
||||||
self.move_data.each_assignment_of(assignment_id, &lp, |assign| {
|
self.move_data.each_assignment_of(assignment_id, &lp, |assign| {
|
||||||
if assignee_cmt.mutbl.is_mutable() {
|
if assignee_cmt.mutbl.is_mutable() {
|
||||||
self.tcx().used_mut_nodes.borrow_mut().insert(local_id);
|
let hir_id = self.bccx.tcx.hir.node_to_hir_id(local_id);
|
||||||
|
self.bccx.used_mut_nodes.borrow_mut().insert(hir_id);
|
||||||
} else {
|
} else {
|
||||||
self.bccx.report_reassigned_immutable_variable(
|
self.bccx.report_reassigned_immutable_variable(
|
||||||
assignment_span,
|
assignment_span,
|
||||||
|
|
|
@ -442,13 +442,13 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
|
||||||
wrapped_path = match current_path.kind {
|
wrapped_path = match current_path.kind {
|
||||||
LpVar(local_id) => {
|
LpVar(local_id) => {
|
||||||
if !through_borrow {
|
if !through_borrow {
|
||||||
self.tcx().used_mut_nodes.borrow_mut().insert(local_id);
|
let hir_id = self.bccx.tcx.hir.node_to_hir_id(local_id);
|
||||||
|
self.bccx.used_mut_nodes.borrow_mut().insert(hir_id);
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
LpUpvar(ty::UpvarId{ var_id, closure_expr_id: _ }) => {
|
LpUpvar(ty::UpvarId{ var_id, closure_expr_id: _ }) => {
|
||||||
let local_id = self.tcx().hir.hir_to_node_id(var_id);
|
self.bccx.used_mut_nodes.borrow_mut().insert(var_id);
|
||||||
self.tcx().used_mut_nodes.borrow_mut().insert(local_id);
|
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
LpExtend(ref base, mc::McInherited, LpDeref(pointer_kind)) |
|
LpExtend(ref base, mc::McInherited, LpDeref(pointer_kind)) |
|
||||||
|
|
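The borrowck hunks above start routing mutable-use information through `used_mut_nodes` into `BorrowCheckResult`, which is what the unused-`mut` lint consumes. A minimal program of the kind that lint targets (the exact warning wording is an assumption):

```rust
fn main() {
    // `x` is declared `mut` but never mutated, so its id never reaches
    // `used_mut_nodes` and the compiler can warn:
    //     warning: variable does not need to be mutable
    let mut x = 5;
    println!("{}", x);
}
```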
|
@ -20,6 +20,7 @@ pub use self::MovedValueUseKind::*;
|
||||||
|
|
||||||
use self::InteriorKind::*;
|
use self::InteriorKind::*;
|
||||||
|
|
||||||
|
use rustc::hir::HirId;
|
||||||
use rustc::hir::map as hir_map;
|
use rustc::hir::map as hir_map;
|
||||||
use rustc::hir::map::blocks::FnLikeNode;
|
use rustc::hir::map::blocks::FnLikeNode;
|
||||||
use rustc::cfg;
|
use rustc::cfg;
|
||||||
|
@ -27,6 +28,7 @@ use rustc::middle::dataflow::DataFlowContext;
|
||||||
use rustc::middle::dataflow::BitwiseOperator;
|
use rustc::middle::dataflow::BitwiseOperator;
|
||||||
use rustc::middle::dataflow::DataFlowOperator;
|
 use rustc::middle::dataflow::DataFlowOperator;
 use rustc::middle::dataflow::KillFrom;
+use rustc::middle::borrowck::BorrowCheckResult;
 use rustc::hir::def_id::{DefId, DefIndex};
 use rustc::middle::expr_use_visitor as euv;
 use rustc::middle::mem_categorization as mc;
@@ -37,7 +39,9 @@ use rustc::middle::free_region::RegionRelations;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::maps::Providers;
 use rustc_mir::util::borrowck_errors::{BorrowckErrors, Origin};
+use rustc::util::nodemap::FxHashSet;
 
+use std::cell::RefCell;
 use std::fmt;
 use std::rc::Rc;
 use std::hash::{Hash, Hasher};
@@ -54,6 +58,8 @@ pub mod gather_loans;
 
 pub mod move_data;
 
+mod unused;
+
 #[derive(Clone, Copy)]
 pub struct LoanDataFlowOperator;
 
@@ -79,7 +85,9 @@ pub struct AnalysisData<'a, 'tcx: 'a> {
     pub move_data: move_data::FlowedMoveData<'a, 'tcx>,
 }
 
-fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) {
+fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId)
+    -> Rc<BorrowCheckResult>
+{
     debug!("borrowck(body_owner_def_id={:?})", owner_def_id);
 
     let owner_id = tcx.hir.as_local_node_id(owner_def_id).unwrap();
@@ -91,7 +99,9 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) {
             // those things (notably the synthesized constructors from
             // tuple structs/variants) do not have an associated body
             // and do not need borrowchecking.
-            return;
+            return Rc::new(BorrowCheckResult {
+                used_mut_nodes: FxHashSet(),
+            })
         }
         _ => { }
     }
@@ -100,7 +110,14 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) {
     let tables = tcx.typeck_tables_of(owner_def_id);
     let region_scope_tree = tcx.region_scope_tree(owner_def_id);
     let body = tcx.hir.body(body_id);
-    let bccx = &mut BorrowckCtxt { tcx, tables, region_scope_tree, owner_def_id, body };
+    let mut bccx = BorrowckCtxt {
+        tcx,
+        tables,
+        region_scope_tree,
+        owner_def_id,
+        body,
+        used_mut_nodes: RefCell::new(FxHashSet()),
+    };
 
     // Eventually, borrowck will always read the MIR, but at the
     // moment we do not. So, for now, we always force MIR to be
@@ -118,14 +135,19 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) {
     if let Some(AnalysisData { all_loans,
                                loans: loan_dfcx,
                                move_data: flowed_moves }) =
-        build_borrowck_dataflow_data(bccx, false, body_id,
+        build_borrowck_dataflow_data(&mut bccx, false, body_id,
                                      |bccx| {
                                          cfg = Some(cfg::CFG::new(bccx.tcx, &body));
                                          cfg.as_mut().unwrap()
                                      })
     {
-        check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
+        check_loans::check_loans(&mut bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
    }
+    unused::check(&mut bccx, body);
+
+    Rc::new(BorrowCheckResult {
+        used_mut_nodes: bccx.used_mut_nodes.into_inner(),
+    })
 }
 
 fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(this: &mut BorrowckCtxt<'a, 'tcx>,
@@ -198,7 +220,14 @@ pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>(
     let tables = tcx.typeck_tables_of(owner_def_id);
     let region_scope_tree = tcx.region_scope_tree(owner_def_id);
     let body = tcx.hir.body(body_id);
-    let mut bccx = BorrowckCtxt { tcx, tables, region_scope_tree, owner_def_id, body };
+    let mut bccx = BorrowckCtxt {
+        tcx,
+        tables,
+        region_scope_tree,
+        owner_def_id,
+        body,
+        used_mut_nodes: RefCell::new(FxHashSet()),
+    };
 
     let dataflow_data = build_borrowck_dataflow_data(&mut bccx, true, body_id, |_| cfg);
     (bccx, dataflow_data.unwrap())
@@ -219,6 +248,8 @@ pub struct BorrowckCtxt<'a, 'tcx: 'a> {
     owner_def_id: DefId,
 
     body: &'tcx hir::Body,
+
+    used_mut_nodes: RefCell<FxHashSet<HirId>>,
 }
 
 impl<'b, 'tcx: 'b> BorrowckErrors for BorrowckCtxt<'b, 'tcx> {
@@ -615,10 +646,11 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
         let msg = if !has_fork && partial { "partially " }
                   else if has_fork && !has_common { "collaterally "}
                   else { "" };
-        let mut err = struct_span_err!(
-            self.tcx.sess, use_span, E0382,
-            "{} of {}moved value: `{}`",
-            verb, msg, nl);
+        let mut err = self.cannot_act_on_moved_value(use_span,
+                                                     verb,
+                                                     msg,
+                                                     &format!("{}", nl),
+                                                     Origin::Ast);
         let need_note = match lp.ty.sty {
             ty::TypeVariants::TyClosure(id, _) => {
                 let node_id = self.tcx.hir.as_local_node_id(id).unwrap();
@@ -698,10 +730,10 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                                                 &self,
                                                 span: Span,
                                                 lp: &LoanPath<'tcx>) {
-        span_err!(
-            self.tcx.sess, span, E0383,
-            "partial reinitialization of uninitialized structure `{}`",
-            self.loan_path_to_string(lp));
+        self.cannot_partially_reinit_an_uninit_struct(span,
+                                                      &self.loan_path_to_string(lp),
+                                                      Origin::Ast)
+            .emit();
     }
 
     pub fn report_reassigned_immutable_variable(&self,
@@ -759,11 +791,24 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
 
         let mut db = match err.cause {
             MutabilityViolation => {
-                self.cannot_assign(error_span, &descr, Origin::Ast)
+                let mut db = self.cannot_assign(error_span, &descr, Origin::Ast);
+                if let mc::NoteClosureEnv(upvar_id) = err.cmt.note {
+                    let node_id = self.tcx.hir.hir_to_node_id(upvar_id.var_id);
+                    let sp = self.tcx.hir.span(node_id);
+                    match self.tcx.sess.codemap().span_to_snippet(sp) {
+                        Ok(snippet) => {
+                            let msg = &format!("consider making `{}` mutable", snippet);
+                            db.span_suggestion(sp, msg, format!("mut {}", snippet));
+                        }
+                        _ => {
+                            db.span_help(sp, "consider making this binding mutable");
+                        }
+                    }
+                }
+                db
             }
             BorrowViolation(euv::ClosureCapture(_)) => {
-                struct_span_err!(self.tcx.sess, error_span, E0595,
-                                 "closure cannot assign to {}", descr)
+                self.closure_cannot_assign_to_borrowed(error_span, &descr, Origin::Ast)
             }
             BorrowViolation(euv::OverloadedOperator) |
             BorrowViolation(euv::AddrOf) |
@@ -772,8 +817,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
             BorrowViolation(euv::AutoUnsafe) |
             BorrowViolation(euv::ForLoop) |
             BorrowViolation(euv::MatchDiscriminant) => {
-                struct_span_err!(self.tcx.sess, error_span, E0596,
-                                 "cannot borrow {} as mutable", descr)
+                self.cannot_borrow_path_as_mutable(error_span, &descr, Origin::Ast)
            }
             BorrowViolation(euv::ClosureInvocation) => {
                 span_bug!(err.span,
@@ -855,21 +899,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
 
         if let Some((yield_span, _)) = maybe_borrow_across_yield {
             debug!("err_out_of_scope: opt_yield_span = {:?}", yield_span);
-            struct_span_err!(self.tcx.sess,
-                             error_span,
-                             E0626,
-                             "borrow may still be in use when generator yields")
-                .span_label(yield_span, "possible yield occurs here")
+            self.cannot_borrow_across_generator_yield(error_span, yield_span, Origin::Ast)
                 .emit();
             return;
         }
 
-        let mut db = struct_span_err!(self.tcx.sess,
-                                      error_span,
-                                      E0597,
-                                      "{} does not live long enough",
-                                      msg);
+        let mut db = self.path_does_not_live_long_enough(error_span, &msg, Origin::Ast);
 
         let (value_kind, value_msg) = match err.cmt.cat {
             mc::Categorization::Rvalue(..) =>
                 ("temporary value", "temporary value created here"),
@@ -978,11 +1013,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
             }
             err_borrowed_pointer_too_short(loan_scope, ptr_scope) => {
                 let descr = self.cmt_to_path_or_string(&err.cmt);
-                let mut db = struct_span_err!(self.tcx.sess, error_span, E0598,
-                                              "lifetime of {} is too short to guarantee \
-                                               its contents can be safely reborrowed",
-                                              descr);
+                let mut db = self.lifetime_too_short_for_reborrow(error_span, &descr, Origin::Ast);
 
                 let descr = match opt_loan_path(&err.cmt) {
                     Some(lp) => {
                         format!("`{}`", self.loan_path_to_string(&lp))
@@ -1054,12 +1085,8 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
         let blame = cmt.immutability_blame();
         let mut err = match blame {
             Some(ImmutabilityBlame::ClosureEnv(id)) => {
-                let mut err = struct_span_err!(
-                    self.tcx.sess, span, E0387,
-                    "{} in a captured outer variable in an `Fn` closure", prefix);
-
                 // FIXME: the distinction between these 2 messages looks wrong.
-                let help = if let BorrowViolation(euv::ClosureCapture(_)) = kind {
+                let help_msg = if let BorrowViolation(euv::ClosureCapture(_)) = kind {
                     // The aliasability violation with closure captures can
                     // happen for nested closures, so we know the enclosing
                     // closure incorrectly accepts an `Fn` while it needs to
@@ -1070,15 +1097,15 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                     "consider changing this closure to take self by mutable reference"
                 };
                 let node_id = self.tcx.hir.def_index_to_node_id(id);
-                err.span_help(self.tcx.hir.span(node_id), help);
-                err
+                let help_span = self.tcx.hir.span(node_id);
+                self.cannot_act_on_capture_in_sharable_fn(span,
+                                                          prefix,
+                                                          (help_span, help_msg),
+                                                          Origin::Ast)
             }
             _ => {
-                let mut err = struct_span_err!(
-                    self.tcx.sess, span, E0389,
-                    "{} in a `&` reference", prefix);
-                err.span_label(span, "assignment into an immutable reference");
-                err
+                self.cannot_assign_into_immutable_reference(span, prefix,
+                                                            Origin::Ast)
             }
         };
         self.note_immutability_blame(&mut err, blame);
@@ -1230,17 +1257,10 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
             Err(_) => format!("move |<args>| <body>")
         };
 
-        struct_span_err!(self.tcx.sess, err.span, E0373,
-                         "closure may outlive the current function, \
-                          but it borrows {}, \
-                          which is owned by the current function",
-                         cmt_path_or_string)
-            .span_label(capture_span,
-                        format!("{} is borrowed here",
-                                cmt_path_or_string))
-            .span_label(err.span,
-                        format!("may outlive borrowed value {}",
-                                cmt_path_or_string))
+        self.cannot_capture_in_long_lived_closure(err.span,
+                                                  &cmt_path_or_string,
+                                                  capture_span,
+                                                  Origin::Ast)
             .span_suggestion(err.span,
                              &format!("to force the closure to take ownership of {} \
                                        (and any other referenced variables), \
src/librustc_borrowck/borrowck/unused.rs (new file, 118 lines)
@@ -0,0 +1,118 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir::intravisit::{Visitor, NestedVisitorMap};
+use rustc::hir::{self, HirId};
+use rustc::lint::builtin::UNUSED_MUT;
+use rustc::ty;
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
+use rustc_back::slice;
+use syntax::ptr::P;
+
+use borrowck::BorrowckCtxt;
+
+pub fn check<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, body: &'tcx hir::Body) {
+    let mut used_mut = bccx.used_mut_nodes.borrow().clone();
+    UsedMutFinder {
+        bccx,
+        set: &mut used_mut,
+    }.visit_expr(&body.value);
+    let mut cx = UnusedMutCx { bccx, used_mut };
+    for arg in body.arguments.iter() {
+        cx.check_unused_mut_pat(slice::ref_slice(&arg.pat));
+    }
+    cx.visit_expr(&body.value);
+}
+
+struct UsedMutFinder<'a, 'tcx: 'a> {
+    bccx: &'a BorrowckCtxt<'a, 'tcx>,
+    set: &'a mut FxHashSet<HirId>,
+}
+
+struct UnusedMutCx<'a, 'tcx: 'a> {
+    bccx: &'a BorrowckCtxt<'a, 'tcx>,
+    used_mut: FxHashSet<HirId>,
+}
+
+impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> {
+    fn check_unused_mut_pat(&self, pats: &[P<hir::Pat>]) {
+        let tcx = self.bccx.tcx;
+        let mut mutables = FxHashMap();
+        for p in pats {
+            p.each_binding(|_, id, span, path1| {
+                let name = path1.node;
+
+                // Skip anything that looks like `_foo`
+                if name.as_str().starts_with("_") {
+                    return
+                }
+
+                // Skip anything that looks like `&foo` or `&mut foo`, only look
+                // for by-value bindings
+                let hir_id = tcx.hir.node_to_hir_id(id);
+                let bm = match self.bccx.tables.pat_binding_modes().get(hir_id) {
+                    Some(&bm) => bm,
+                    None => span_bug!(span, "missing binding mode"),
+                };
+                match bm {
+                    ty::BindByValue(hir::MutMutable) => {}
+                    _ => return,
+                }
+
+                mutables.entry(name).or_insert(Vec::new()).push((id, hir_id, span));
+            });
+        }
+
+        for (_name, ids) in mutables {
+            // If any id for this name was used mutably then consider them all
+            // ok, so move on to the next
+            if ids.iter().any(|&(_, ref id, _)| self.used_mut.contains(id)) {
+                continue
+            }
+
+            let mut_span = tcx.sess.codemap().span_until_char(ids[0].2, ' ');
+
+            // Ok, every name wasn't used mutably, so issue a warning that this
+            // didn't need to be mutable.
+            tcx.struct_span_lint_node(UNUSED_MUT,
+                                      ids[0].0,
+                                      ids[0].2,
+                                      "variable does not need to be mutable")
+                .span_suggestion_short(mut_span, "remove this `mut`", "".to_owned())
+                .emit();
+        }
+    }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for UnusedMutCx<'a, 'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::OnlyBodies(&self.bccx.tcx.hir)
+    }
+
+    fn visit_arm(&mut self, arm: &hir::Arm) {
+        self.check_unused_mut_pat(&arm.pats)
+    }
+
+    fn visit_local(&mut self, local: &hir::Local) {
+        self.check_unused_mut_pat(slice::ref_slice(&local.pat));
+    }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for UsedMutFinder<'a, 'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::OnlyBodies(&self.bccx.tcx.hir)
+    }
+
+    fn visit_nested_body(&mut self, id: hir::BodyId) {
+        let def_id = self.bccx.tcx.hir.body_owner_def_id(id);
+        self.set.extend(self.bccx.tcx.borrowck(def_id).used_mut_nodes.iter().cloned());
+        self.visit_body(self.bccx.tcx.hir.body(id));
+    }
+}
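As a quick illustration of what this new pass does on ordinary code (my sketch, not part of the commit): `check_unused_mut_pat` only warns for by-value `mut` bindings whose `HirId` never reaches `used_mut_nodes`, and it skips names with a leading underscore entirely.

```rust
fn main() {
    let mut a = 1;          // never mutated: UNUSED_MUT fires and suggests removing `mut`
    println!("{}", a);

    let mut b = 2;          // mutated, so its HirId ends up in `used_mut_nodes`: no warning
    b += 1;
    println!("{}", b);

    let mut _scratch = 3;   // leading `_`: skipped by `check_unused_mut_pat`
    println!("{}", _scratch);
}
```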
@@ -9,472 +9,3 @@
 // except according to those terms.
 
 #![allow(non_snake_case)]
-
-register_long_diagnostics! {
-
-E0373: r##"
-This error occurs when an attempt is made to use data captured by a closure,
-when that data may no longer exist. It's most commonly seen when attempting to
-return a closure:
-
-```compile_fail,E0373
-fn foo() -> Box<Fn(u32) -> u32> {
-    let x = 0u32;
-    Box::new(|y| x + y)
-}
-```
-
-Notice that `x` is stack-allocated by `foo()`. By default, Rust captures
-closed-over data by reference. This means that once `foo()` returns, `x` no
-longer exists. An attempt to access `x` within the closure would thus be
-unsafe.
-
-Another situation where this might be encountered is when spawning threads:
-
-```compile_fail,E0373
-fn foo() {
-    let x = 0u32;
-    let y = 1u32;
-
-    let thr = std::thread::spawn(|| {
-        x + y
-    });
-}
-```
-
-Since our new thread runs in parallel, the stack frame containing `x` and `y`
-may well have disappeared by the time we try to use them. Even if we call
-`thr.join()` within foo (which blocks until `thr` has completed, ensuring the
-stack frame won't disappear), we will not succeed: the compiler cannot prove
-that this behaviour is safe, and so won't let us do it.
-
-The solution to this problem is usually to switch to using a `move` closure.
-This approach moves (or copies, where possible) data into the closure, rather
-than taking references to it. For example:
-
-```
-fn foo() -> Box<Fn(u32) -> u32> {
-    let x = 0u32;
-    Box::new(move |y| x + y)
-}
-```
-
-Now that the closure has its own copy of the data, there's no need to worry
-about safety.
-"##,
-
-E0382: r##"
-This error occurs when an attempt is made to use a variable after its contents
-have been moved elsewhere. For example:
-
-```compile_fail,E0382
-struct MyStruct { s: u32 }
-
-fn main() {
-    let mut x = MyStruct{ s: 5u32 };
-    let y = x;
-    x.s = 6;
-    println!("{}", x.s);
-}
-```
-
-Since `MyStruct` is a type that is not marked `Copy`, the data gets moved out
-of `x` when we set `y`. This is fundamental to Rust's ownership system: outside
-of workarounds like `Rc`, a value cannot be owned by more than one variable.
-
-If we own the type, the easiest way to address this problem is to implement
-`Copy` and `Clone` on it, as shown below. This allows `y` to copy the
-information in `x`, while leaving the original version owned by `x`. Subsequent
-changes to `x` will not be reflected when accessing `y`.
-
-```
-#[derive(Copy, Clone)]
-struct MyStruct { s: u32 }
-
-fn main() {
-    let mut x = MyStruct{ s: 5u32 };
-    let y = x;
-    x.s = 6;
-    println!("{}", x.s);
-}
-```
-
-Alternatively, if we don't control the struct's definition, or mutable shared
-ownership is truly required, we can use `Rc` and `RefCell`:
-
-```
-use std::cell::RefCell;
-use std::rc::Rc;
-
-struct MyStruct { s: u32 }
-
-fn main() {
-    let mut x = Rc::new(RefCell::new(MyStruct{ s: 5u32 }));
-    let y = x.clone();
-    x.borrow_mut().s = 6;
-    println!("{}", x.borrow().s);
-}
-```
-
-With this approach, x and y share ownership of the data via the `Rc` (reference
-count type). `RefCell` essentially performs runtime borrow checking: ensuring
-that at most one writer or multiple readers can access the data at any one time.
-
-If you wish to learn more about ownership in Rust, start with the chapter in the
-Book:
-
-https://doc.rust-lang.org/book/first-edition/ownership.html
-"##,
-
-E0383: r##"
-This error occurs when an attempt is made to partially reinitialize a
-structure that is currently uninitialized.
-
-For example, this can happen when a drop has taken place:
-
-```compile_fail,E0383
-struct Foo {
-    a: u32,
-}
-impl Drop for Foo {
-    fn drop(&mut self) { /* ... */ }
-}
-
-let mut x = Foo { a: 1 };
-drop(x); // `x` is now uninitialized
-x.a = 2; // error, partial reinitialization of uninitialized structure `t`
-```
-
-This error can be fixed by fully reinitializing the structure in question:
-
-```
-struct Foo {
-    a: u32,
-}
-impl Drop for Foo {
-    fn drop(&mut self) { /* ... */ }
-}
-
-let mut x = Foo { a: 1 };
-drop(x);
-x = Foo { a: 2 };
-```
-"##,
-
-/*E0386: r##"
-This error occurs when an attempt is made to mutate the target of a mutable
-reference stored inside an immutable container.
-
-For example, this can happen when storing a `&mut` inside an immutable `Box`:
-
-```compile_fail,E0386
-let mut x: i64 = 1;
-let y: Box<_> = Box::new(&mut x);
-**y = 2; // error, cannot assign to data in an immutable container
-```
-
-This error can be fixed by making the container mutable:
-
-```
-let mut x: i64 = 1;
-let mut y: Box<_> = Box::new(&mut x);
-**y = 2;
-```
-
-It can also be fixed by using a type with interior mutability, such as `Cell`
-or `RefCell`:
-
-```
-use std::cell::Cell;
-
-let x: i64 = 1;
-let y: Box<Cell<_>> = Box::new(Cell::new(x));
-y.set(2);
-```
-"##,*/
-
-E0387: r##"
-This error occurs when an attempt is made to mutate or mutably reference data
-that a closure has captured immutably. Examples of this error are shown below:
-
-```compile_fail,E0387
-// Accepts a function or a closure that captures its environment immutably.
-// Closures passed to foo will not be able to mutate their closed-over state.
-fn foo<F: Fn()>(f: F) { }
-
-// Attempts to mutate closed-over data. Error message reads:
-// `cannot assign to data in a captured outer variable...`
-fn mutable() {
-    let mut x = 0u32;
-    foo(|| x = 2);
-}
-
-// Attempts to take a mutable reference to closed-over data. Error message
-// reads: `cannot borrow data mutably in a captured outer variable...`
-fn mut_addr() {
-    let mut x = 0u32;
-    foo(|| { let y = &mut x; });
-}
-```
-
-The problem here is that foo is defined as accepting a parameter of type `Fn`.
-Closures passed into foo will thus be inferred to be of type `Fn`, meaning that
-they capture their context immutably.
-
-If the definition of `foo` is under your control, the simplest solution is to
-capture the data mutably. This can be done by defining `foo` to take FnMut
-rather than Fn:
-
-```
-fn foo<F: FnMut()>(f: F) { }
-```
-
-Alternatively, we can consider using the `Cell` and `RefCell` types to achieve
-interior mutability through a shared reference. Our example's `mutable`
-function could be redefined as below:
-
-```
-use std::cell::Cell;
-
-fn foo<F: Fn()>(f: F) { }
-
-fn mutable() {
-    let x = Cell::new(0u32);
-    foo(|| x.set(2));
-}
-```
-
-You can read more about cell types in the API documentation:
-
-https://doc.rust-lang.org/std/cell/
-"##,
-
-E0388: r##"
-E0388 was removed and is no longer issued.
-"##,
-
-E0389: r##"
-An attempt was made to mutate data using a non-mutable reference. This
-commonly occurs when attempting to assign to a non-mutable reference of a
-mutable reference (`&(&mut T)`).
-
-Example of erroneous code:
-
-```compile_fail,E0389
-struct FancyNum {
-    num: u8,
-}
-
-fn main() {
-    let mut fancy = FancyNum{ num: 5 };
-    let fancy_ref = &(&mut fancy);
-    fancy_ref.num = 6; // error: cannot assign to data in a `&` reference
-    println!("{}", fancy_ref.num);
-}
-```
-
-Here, `&mut fancy` is mutable, but `&(&mut fancy)` is not. Creating an
-immutable reference to a value borrows it immutably. There can be multiple
-references of type `&(&mut T)` that point to the same value, so they must be
-immutable to prevent multiple mutable references to the same value.
-
-To fix this, either remove the outer reference:
-
-```
-struct FancyNum {
-    num: u8,
-}
-
-fn main() {
-    let mut fancy = FancyNum{ num: 5 };
-
-    let fancy_ref = &mut fancy;
-    // `fancy_ref` is now &mut FancyNum, rather than &(&mut FancyNum)
-
-    fancy_ref.num = 6; // No error!
-
-    println!("{}", fancy_ref.num);
-}
-```
-
-Or make the outer reference mutable:
-
-```
-struct FancyNum {
-    num: u8
-}
-
-fn main() {
-    let mut fancy = FancyNum{ num: 5 };
-
-    let fancy_ref = &mut (&mut fancy);
-    // `fancy_ref` is now &mut(&mut FancyNum), rather than &(&mut FancyNum)
-
-    fancy_ref.num = 6; // No error!
-
-    println!("{}", fancy_ref.num);
-}
-```
-"##,
-
-E0595: r##"
-Closures cannot mutate immutable captured variables.
-
-Erroneous code example:
-
-```compile_fail,E0595
-let x = 3; // error: closure cannot assign to immutable local variable `x`
-let mut c = || { x += 1 };
-```
-
-Make the variable binding mutable:
-
-```
-let mut x = 3; // ok!
-let mut c = || { x += 1 };
-```
-"##,
-
-E0596: r##"
-This error occurs because you tried to mutably borrow a non-mutable variable.
-
-Example of erroneous code:
-
-```compile_fail,E0596
-let x = 1;
-let y = &mut x; // error: cannot borrow mutably
-```
-
-In here, `x` isn't mutable, so when we try to mutably borrow it in `y`, it
-fails. To fix this error, you need to make `x` mutable:
-
-```
-let mut x = 1;
-let y = &mut x; // ok!
-```
-"##,
-
-E0597: r##"
-This error occurs because a borrow was made inside a variable which has a
-greater lifetime than the borrowed one.
-
-Example of erroneous code:
-
-```compile_fail,E0597
-struct Foo<'a> {
-    x: Option<&'a u32>,
-}
-
-let mut x = Foo { x: None };
-let y = 0;
-x.x = Some(&y); // error: `y` does not live long enough
-```
-
-In here, `x` is created before `y` and therefore has a greater lifetime. Always
-keep in mind that values in a scope are dropped in the opposite order they are
-created. So to fix the previous example, just make the `y` lifetime greater than
-the `x`'s one:
-
-```
-struct Foo<'a> {
-    x: Option<&'a u32>,
-}
-
-let y = 0;
-let mut x = Foo { x: None };
-x.x = Some(&y);
-```
-"##,
-
-E0626: r##"
-This error occurs because a borrow in a generator persists across a
-yield point.
-
-```compile_fail,E0626
-# #![feature(generators, generator_trait)]
-# use std::ops::Generator;
-let mut b = || {
-    let a = &String::new(); // <-- This borrow...
-    yield (); // ...is still in scope here, when the yield occurs.
-    println!("{}", a);
-};
-b.resume();
-```
-
-At present, it is not permitted to have a yield that occurs while a
-borrow is still in scope. To resolve this error, the borrow must
-either be "contained" to a smaller scope that does not overlap the
-yield or else eliminated in another way. So, for example, we might
-resolve the previous example by removing the borrow and just storing
-the integer by value:
-
-```
-# #![feature(generators, generator_trait)]
-# use std::ops::Generator;
-let mut b = || {
-    let a = 3;
-    yield ();
-    println!("{}", a);
-};
-b.resume();
-```
-
-This is a very simple case, of course. In more complex cases, we may
-wish to have more than one reference to the value that was borrowed --
-in those cases, something like the `Rc` or `Arc` types may be useful.
-
-This error also frequently arises with iteration:
-
-```compile_fail,E0626
-# #![feature(generators, generator_trait)]
-# use std::ops::Generator;
-let mut b = || {
-  let v = vec![1,2,3];
-  for &x in &v { // <-- borrow of `v` is still in scope...
-    yield x; // ...when this yield occurs.
-  }
-};
-b.resume();
-```
-
-Such cases can sometimes be resolved by iterating "by value" (or using
-`into_iter()`) to avoid borrowing:
-
-```
-# #![feature(generators, generator_trait)]
-# use std::ops::Generator;
-let mut b = || {
-  let v = vec![1,2,3];
-  for x in v { // <-- Take ownership of the values instead!
-    yield x; // <-- Now yield is OK.
-  }
-};
-b.resume();
-```
-
-If taking ownership is not an option, using indices can work too:
-
-```
-# #![feature(generators, generator_trait)]
-# use std::ops::Generator;
-let mut b = || {
-  let v = vec![1,2,3];
-  let len = v.len(); // (*)
-  for i in 0..len {
-    let x = v[i]; // (*)
-    yield x; // <-- Now yield is OK.
-  }
-};
-b.resume();
-
-// (*) -- Unfortunately, these temporaries are currently required.
-// See <https://github.com/rust-lang/rust/issues/43122>.
-```
-"##,
-
-}
-
-register_diagnostics! {
-//  E0385, // {} in an aliasable location
-    E0598, // lifetime of {} is too short to guarantee its contents can be...
-}
@@ -16,12 +16,12 @@
 #![allow(non_camel_case_types)]
 
 #![feature(quote)]
-#![feature(rustc_diagnostic_macros)]
 
 #[macro_use] extern crate log;
-#[macro_use] extern crate syntax;
+extern crate syntax;
 extern crate syntax_pos;
 extern crate rustc_errors as errors;
+extern crate rustc_back;
 
 // for "clarity", rename the graphviz crate to dot; graphviz within `borrowck`
 // refers to the borrowck-specific graphviz adapter traits.
@@ -33,14 +33,8 @@ extern crate rustc_mir;
 pub use borrowck::check_crate;
 pub use borrowck::build_borrowck_dataflow_data_for_fn;
 
-// NB: This module needs to be declared first so diagnostics are
-// registered before they are used.
-mod diagnostics;
-
 mod borrowck;
 
 pub mod graphviz;
 
 pub use borrowck::provide;
-
-__build_diagnostic_array! { librustc_borrowck, DIAGNOSTICS }
@@ -31,7 +31,7 @@
 //! be indexed by the direction (see the type `Direction`).
 
 use bitvec::BitVector;
-use std::fmt::{Formatter, Error, Debug};
+use std::fmt::Debug;
 use std::usize;
 use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
 
@@ -48,6 +48,7 @@ pub struct Node<N> {
     pub data: N,
 }
 
+#[derive(Debug)]
 pub struct Edge<E> {
     next_edge: [EdgeIndex; 2], // see module comment
     source: NodeIndex,
@@ -69,18 +70,6 @@ impl<N> SnapshotVecDelegate for Edge<N> {
     fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
 }
 
-impl<E: Debug> Debug for Edge<E> {
-    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
-        write!(f,
-               "Edge {{ next_edge: [{:?}, {:?}], source: {:?}, target: {:?}, data: {:?} }}",
-               self.next_edge[0],
-               self.next_edge[1],
-               self.source,
-               self.target,
-               self.data)
-    }
-}
-
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
 pub struct NodeIndex(pub usize);
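The manual `Debug` impl removed above printed the same field-by-field shape that `#[derive(Debug)]` generates, which is why the derive is a drop-in replacement. A standalone sketch of the equivalence (the index fields are replaced with `usize` purely for illustration):

```rust
#[derive(Debug)]
struct Edge<E> {
    next_edge: [usize; 2],
    source: usize,
    target: usize,
    data: E,
}

fn main() {
    let e = Edge { next_edge: [0, 1], source: 2, target: 3, data: "weight" };
    // Prints: Edge { next_edge: [0, 1], source: 2, target: 3, data: "weight" }
    println!("{:?}", e);
}
```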
@@ -40,39 +40,80 @@ impl Idx for u32 {
 
 #[macro_export]
 macro_rules! newtype_index {
+    // ---- public rules ----
+
+    // Use default constants
     ($name:ident) => (
-        newtype_index!($name, unsafe { ::std::intrinsics::type_name::<$name>() });
+        newtype_index!(
+            @type[$name]
+            @max[::std::u32::MAX]
+            @debug_name[unsafe {::std::intrinsics::type_name::<$name>() }]);
     );
 
-    ($name:ident, $debug_name:expr) => (
+    // Define any constants
+    ($name:ident { $($tokens:tt)+ }) => (
+        newtype_index!(
+            @type[$name]
+            @max[::std::u32::MAX]
+            @debug_name[unsafe {::std::intrinsics::type_name::<$name>() }]
+            $($tokens)+);
+    );
+
+    // ---- private rules ----
+
+    // Base case, user-defined constants (if any) have already been defined
+    (@type[$type:ident] @max[$max:expr] @debug_name[$debug_name:expr]) => (
         #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
                  RustcEncodable, RustcDecodable)]
-        pub struct $name(u32);
+        pub struct $type(u32);
 
-        impl $name {
-            // HACK use for constants
-            #[allow(unused)]
-            const fn const_new(x: u32) -> Self {
-                $name(x)
-            }
-        }
-
-        impl Idx for $name {
+        impl Idx for $type {
             fn new(value: usize) -> Self {
-                assert!(value < (::std::u32::MAX) as usize);
-                $name(value as u32)
+                assert!(value < ($max) as usize);
+                $type(value as u32)
             }
             fn index(self) -> usize {
                 self.0 as usize
            }
         }
 
-        impl ::std::fmt::Debug for $name {
+        impl ::std::fmt::Debug for $type {
             fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                 write!(fmt, "{}{}", $debug_name, self.0)
             }
         }
-    )
+    );
 
+    // Rewrite final without comma to one that includes comma
+    (@type[$type:ident] @max[$max:expr] @debug_name[$debug_name:expr]
+            $name:ident = $constant:expr) => (
+        newtype_index!(@type[$type] @max[$max] @debug_name[$debug_name] $name = $constant,);
+    );
+
+    // Rewrite final const without comma to one that includes comma
+    (@type[$type:ident] @max[$_max:expr] @debug_name[$debug_name:expr]
+            const $name:ident = $constant:expr) => (
+        newtype_index!(@type[$type] @max[$max] @debug_name[$debug_name] const $name = $constant,);
+    );
+
+    // Replace existing default for max
+    (@type[$type:ident] @max[$_max:expr] @debug_name[$debug_name:expr]
+            MAX = $max:expr, $($tokens:tt)*) => (
+        newtype_index!(@type[$type] @max[$max] @debug_name[$debug_name] $(tokens)*);
+    );
+
+    // Replace existing default for debug_name
+    (@type[$type:ident] @max[$max:expr] @debug_name[$_debug_name:expr]
+            DEBUG_NAME = $debug_name:expr, $($tokens:tt)*) => (
+        newtype_index!(@type[$type] @max[$max] @debug_name[$debug_name] $($tokens)*);
+    );
+
+    // Assign a user-defined constant (as final param)
+    (@type[$type:ident] @max[$max:expr] @debug_name[$debug_name:expr]
+            const $name:ident = $constant:expr, $($tokens:tt)*) => (
+        pub const $name: $type = $type($constant);
+        newtype_index!(@type[$type] @max[$max] @debug_name[$debug_name] $($tokens)*);
+    );
 }
 
 #[derive(Clone, PartialEq, Eq)]
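Reading the new rules literally, the brace form lets a caller override the defaults and declare constants in one invocation, with the `@type`/`@max`/`@debug_name` arms threading the state through recursively. A hypothetical invocation under that reading (the type and constant names below are invented for illustration, and this assumes the macro is in scope as exported from this crate):

```rust
// Default max and debug name:
newtype_index!(TempIndex);

// Override the debug name and declare a constant; per the private rules above this
// should expand to `pub const START_BLOCK: BasicBlock = BasicBlock(0);` plus the
// struct, Idx impl, and Debug impl for `BasicBlock`.
newtype_index!(BasicBlock {
    DEBUG_NAME = "bb",
    const START_BLOCK = 0,
});
```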
@@ -1238,7 +1238,7 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
                                      errors::Level::Note);
             }
 
-            writeln!(io::stderr(), "{}", str::from_utf8(&data.lock().unwrap()).unwrap()).unwrap();
+            eprintln!("{}", str::from_utf8(&data.lock().unwrap()).unwrap());
         }
 
         exit_on_err();
@@ -1259,7 +1259,6 @@ pub fn diagnostics_registry() -> errors::registry::Registry {
     let mut all_errors = Vec::new();
     all_errors.extend_from_slice(&rustc::DIAGNOSTICS);
     all_errors.extend_from_slice(&rustc_typeck::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_borrowck::DIAGNOSTICS);
     all_errors.extend_from_slice(&rustc_resolve::DIAGNOSTICS);
     all_errors.extend_from_slice(&rustc_privacy::DIAGNOSTICS);
     #[cfg(feature="llvm")]
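The driver change above swaps an explicit `writeln!` to stderr for `eprintln!`, which was stabilized in Rust 1.19 and panics on write failure itself, so the trailing `.unwrap()` goes away. A minimal sketch of the equivalence:

```rust
use std::io::{self, Write};

fn main() {
    // Before: explicit stderr handle, failure handled with unwrap()
    writeln!(io::stderr(), "{}", "payload").unwrap();

    // After: same output on stderr, no handle or unwrap needed
    eprintln!("{}", "payload");
}
```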
@ -13,12 +13,12 @@
|
||||||
//! we will compare the fingerprint from the current and from the previous
|
//! we will compare the fingerprint from the current and from the previous
|
||||||
//! compilation session as appropriate:
|
//! compilation session as appropriate:
|
||||||
//!
|
//!
|
||||||
//! - `#[rustc_dirty(label="TypeckTables", cfg="rev2")]` if we are
|
//! - `#[rustc_clean(cfg="rev2", except="TypeckTables")]` if we are
|
||||||
//! in `#[cfg(rev2)]`, then the fingerprints associated with
|
//! in `#[cfg(rev2)]`, then the fingerprints associated with
|
||||||
//! `DepNode::TypeckTables(X)` must be DIFFERENT (`X` is the def-id of the
|
//! `DepNode::TypeckTables(X)` must be DIFFERENT (`X` is the def-id of the
|
||||||
//! current node).
|
//! current node).
|
||||||
//! - `#[rustc_clean(label="TypeckTables", cfg="rev2")]` same as above,
|
//! - `#[rustc_clean(cfg="rev2")]` same as above, except that the
|
||||||
//! except that the fingerprints must be the SAME.
|
//! fingerprints must be the SAME (along with all other fingerprints).
|
||||||
//!
|
//!
|
||||||
//! Errors are reported if we are in the suitable configuration but
|
//! Errors are reported if we are in the suitable configuration but
|
||||||
//! the required condition is not met.
|
//! the required condition is not met.
|
||||||
|
@ -40,9 +40,12 @@
|
||||||
//!
|
//!
|
||||||
|
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
use std::iter::FromIterator;
|
||||||
use std::vec::Vec;
|
use std::vec::Vec;
|
||||||
use rustc::dep_graph::DepNode;
|
use rustc::dep_graph::{DepNode, label_strs};
|
||||||
use rustc::hir;
|
use rustc::hir;
|
||||||
|
use rustc::hir::{Item_ as HirItem, ImplItemKind, TraitItemKind};
|
||||||
|
use rustc::hir::map::Node as HirNode;
|
||||||
use rustc::hir::def_id::DefId;
|
use rustc::hir::def_id::DefId;
|
||||||
use rustc::hir::itemlikevisit::ItemLikeVisitor;
|
use rustc::hir::itemlikevisit::ItemLikeVisitor;
|
||||||
use rustc::hir::intravisit;
|
use rustc::hir::intravisit;
|
||||||
|
@ -53,11 +56,183 @@ use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use rustc::ty::TyCtxt;
|
use rustc::ty::TyCtxt;
|
||||||
|
|
||||||
const LABEL: &'static str = "label";
|
const EXCEPT: &str = "except";
|
||||||
const CFG: &'static str = "cfg";
|
const LABEL: &str = "label";
|
||||||
|
const CFG: &str = "cfg";
|
||||||
|
|
||||||
|
// Base and Extra labels to build up the labels
|
||||||
|
|
||||||
|
/// For typedef, constants, and statics
|
||||||
|
const BASE_CONST: &[&str] = &[
|
||||||
|
label_strs::TypeOfItem,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// DepNodes for functions + methods
|
||||||
|
const BASE_FN: &[&str] = &[
|
||||||
|
// Callers will depend on the signature of these items, so we better test
|
||||||
|
label_strs::FnSignature,
|
||||||
|
label_strs::GenericsOfItem,
|
||||||
|
label_strs::PredicatesOfItem,
|
||||||
|
label_strs::TypeOfItem,
|
||||||
|
|
||||||
|
// And a big part of compilation (that we eventually want to cache) is type inference
|
||||||
|
// information:
|
||||||
|
label_strs::TypeckTables,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// DepNodes for Hir, which is pretty much everything
|
||||||
|
const BASE_HIR: &[&str] = &[
|
||||||
|
// Hir and HirBody should be computed for all nodes
|
||||||
|
label_strs::Hir,
|
||||||
|
label_strs::HirBody,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// `impl` implementation of struct/trait
|
||||||
|
const BASE_IMPL: &[&str] = &[
|
||||||
|
label_strs::AssociatedItemDefIds,
|
||||||
|
label_strs::GenericsOfItem,
|
||||||
|
label_strs::ImplTraitRef,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// DepNodes for MirValidated/Optimized, which is relevant in "executable"
|
||||||
|
/// code, i.e. functions+methods
|
||||||
|
const BASE_MIR: &[&str] = &[
|
||||||
|
label_strs::MirOptimized,
|
||||||
|
label_strs::MirValidated,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Struct, Enum and Union DepNodes
|
||||||
|
///
|
||||||
|
/// Note that changing the type of a field does not change the type of the struct or enum, but
|
||||||
|
/// adding/removing fields or changing a fields name or visibility does.
|
||||||
|
const BASE_STRUCT: &[&str] = &[
|
||||||
|
label_strs::GenericsOfItem,
|
||||||
|
label_strs::PredicatesOfItem,
|
||||||
|
label_strs::TypeOfItem,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Trait Definition DepNodes
|
||||||
|
const BASE_TRAIT_DEF: &[&str] = &[
|
||||||
|
label_strs::AssociatedItemDefIds,
|
||||||
|
label_strs::GenericsOfItem,
|
||||||
|
label_strs::ObjectSafety,
|
||||||
|
label_strs::PredicatesOfItem,
|
||||||
|
label_strs::SpecializationGraph,
|
||||||
|
label_strs::TraitDefOfItem,
|
||||||
|
label_strs::TraitImpls,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// extra DepNodes for methods (+fn)
|
||||||
|
const EXTRA_ASSOCIATED: &[&str] = &[
|
||||||
|
label_strs::AssociatedItems,
|
||||||
|
];
|
||||||
|
|
||||||
|
const EXTRA_TRAIT: &[&str] = &[
|
||||||
|
label_strs::TraitOfItem,
|
||||||
|
];
|
||||||
|
|
||||||
|
// Fully Built Labels
|
||||||
|
|
||||||
|
const LABELS_CONST: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_CONST,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Constant/Typedef in an impl
|
||||||
|
const LABELS_CONST_IN_IMPL: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_CONST,
|
||||||
|
EXTRA_ASSOCIATED,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Trait-Const/Typedef DepNodes
|
||||||
|
const LABELS_CONST_IN_TRAIT: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_CONST,
|
||||||
|
EXTRA_ASSOCIATED,
|
||||||
|
EXTRA_TRAIT,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Function DepNode
|
||||||
|
const LABELS_FN: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_MIR,
|
||||||
|
BASE_FN,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Method DepNodes
|
||||||
|
const LABELS_FN_IN_IMPL: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_MIR,
|
||||||
|
BASE_FN,
|
||||||
|
EXTRA_ASSOCIATED,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Trait-Method DepNodes
|
||||||
|
const LABELS_FN_IN_TRAIT: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_MIR,
|
||||||
|
BASE_FN,
|
||||||
|
EXTRA_ASSOCIATED,
|
||||||
|
EXTRA_TRAIT,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// For generic cases like inline-assemply/mod/etc
|
||||||
|
const LABELS_HIR_ONLY: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Impl DepNodes
|
||||||
|
const LABELS_IMPL: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_IMPL,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Abstract Data Type (Struct, Enum, Unions) DepNodes
|
||||||
|
const LABELS_ADT: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_STRUCT,
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Trait Definition DepNodes
|
||||||
|
#[allow(dead_code)]
|
||||||
|
const LABELS_TRAIT: &[&[&str]] = &[
|
||||||
|
BASE_HIR,
|
||||||
|
BASE_TRAIT_DEF,
|
||||||
|
];
|
||||||
|
|
||||||
|
|
||||||
|
// FIXME: Struct/Enum/Unions Fields (there is currently no way to attach these)
|
||||||
|
//
|
||||||
|
// Fields are kind of separate from their containers, as they can change independently from
|
||||||
|
// them. We should at least check
|
||||||
|
//
|
||||||
|
// TypeOfItem for these.
|
||||||
|
|
||||||
type Labels = HashSet<String>;
|
type Labels = HashSet<String>;
|
||||||
|
|
||||||
|
/// Represents the requested configuration by rustc_clean/dirty
|
||||||
|
struct Assertion {
|
||||||
|
clean: Labels,
|
||||||
|
dirty: Labels,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Assertion {
|
||||||
|
fn from_clean_labels(labels: Labels) -> Assertion {
|
||||||
|
Assertion {
|
||||||
|
clean: labels,
|
||||||
|
dirty: Labels::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_dirty_labels(labels: Labels) -> Assertion {
|
||||||
|
Assertion {
|
||||||
|
clean: Labels::new(),
|
||||||
|
dirty: labels,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
||||||
// can't add `#[rustc_dirty]` etc without opting in to this feature
|
// can't add `#[rustc_dirty]` etc without opting in to this feature
|
||||||
if !tcx.sess.features.borrow().rustc_attrs {
|
if !tcx.sess.features.borrow().rustc_attrs {
|
||||||
|
@ -91,14 +266,189 @@ pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
|
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
|
||||||
fn labels(&self, attr: &Attribute) -> Labels {
|
|
||||||
|
/// Possibly "deserialize" the attribute into a clean/dirty assertion
|
||||||
|
fn assertion_maybe(&mut self, item_id: ast::NodeId, attr: &Attribute)
|
||||||
|
-> Option<Assertion>
|
||||||
|
{
|
||||||
|
let is_clean = if attr.check_name(ATTR_DIRTY) {
|
||||||
|
false
|
||||||
|
} else if attr.check_name(ATTR_CLEAN) {
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
// skip: not rustc_clean/dirty
|
||||||
|
return None
|
||||||
|
};
|
||||||
|
if !check_config(self.tcx, attr) {
|
||||||
|
// skip: not the correct `cfg=`
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let assertion = if let Some(labels) = self.labels(attr) {
|
||||||
|
if is_clean {
|
||||||
|
Assertion::from_clean_labels(labels)
|
||||||
|
} else {
|
||||||
|
Assertion::from_dirty_labels(labels)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
self.assertion_auto(item_id, attr, is_clean)
|
||||||
|
};
|
||||||
|
Some(assertion)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the "auto" assertion on pre-validated attr, along with the `except` labels
|
||||||
|
fn assertion_auto(&mut self, item_id: ast::NodeId, attr: &Attribute, is_clean: bool)
|
||||||
|
-> Assertion
|
||||||
|
{
|
||||||
|
let (name, mut auto) = self.auto_labels(item_id, attr);
|
||||||
|
let except = self.except(attr);
|
||||||
|
for e in except.iter() {
|
||||||
|
if !auto.remove(e) {
|
||||||
|
let msg = format!(
|
||||||
|
"`except` specified DepNodes that can not be affected for \"{}\": \"{}\"",
|
||||||
|
name,
|
||||||
|
e
|
||||||
|
);
|
||||||
|
self.tcx.sess.span_fatal(attr.span, &msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if is_clean {
|
||||||
|
Assertion {
|
||||||
|
clean: auto,
|
||||||
|
dirty: except,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Assertion {
|
||||||
|
clean: except,
|
||||||
|
dirty: auto,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn labels(&self, attr: &Attribute) -> Option<Labels> {
|
||||||
for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
|
for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
|
||||||
if item.check_name(LABEL) {
|
if item.check_name(LABEL) {
|
||||||
|
let value = expect_associated_value(self.tcx, &item);
|
||||||
|
return Some(self.resolve_labels(&item, value.as_str().as_ref()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `except=` attribute value
|
||||||
|
fn except(&self, attr: &Attribute) -> Labels {
|
||||||
|
for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
|
||||||
|
if item.check_name(EXCEPT) {
|
||||||
let value = expect_associated_value(self.tcx, &item);
|
let value = expect_associated_value(self.tcx, &item);
|
||||||
return self.resolve_labels(&item, value.as_str().as_ref());
|
return self.resolve_labels(&item, value.as_str().as_ref());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self.tcx.sess.span_fatal(attr.span, "no `label` found");
|
// if no `label` or `except` is given, only the node's group are asserted
|
||||||
|
Labels::new()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return all DepNode labels that should be asserted for this item.
|
||||||
|
/// index=0 is the "name" used for error messages
|
||||||
|
fn auto_labels(&mut self, item_id: ast::NodeId, attr: &Attribute) -> (&'static str, Labels) {
|
||||||
|
let node = self.tcx.hir.get(item_id);
|
||||||
|
let (name, labels) = match node {
|
||||||
|
HirNode::NodeItem(item) => {
|
||||||
|
match item.node {
|
||||||
|
+                    // note: these are in the same order as hir::Item_;
+                    // FIXME(michaelwoerister): do commented out ones
+
+                    // // An `extern crate` item, with optional original crate name,
+                    // HirItem::ItemExternCrate(..), // intentionally no assertions
+
+                    // // `use foo::bar::*;` or `use foo::bar::baz as quux;`
+                    // HirItem::ItemUse(..), // intentionally no assertions
+
+                    // A `static` item
+                    HirItem::ItemStatic(..) => ("ItemStatic", LABELS_CONST),
+
+                    // A `const` item
+                    HirItem::ItemConst(..) => ("ItemConst", LABELS_CONST),
+
+                    // A function declaration
+                    HirItem::ItemFn(..) => ("ItemFn", LABELS_FN),
+
+                    // // A module
+                    HirItem::ItemMod(..) => ("ItemMod", LABELS_HIR_ONLY),
+
+                    // // An external module
+                    HirItem::ItemForeignMod(..) => ("ItemForeignMod", LABELS_HIR_ONLY),
+
+                    // Module-level inline assembly (from global_asm!)
+                    HirItem::ItemGlobalAsm(..) => ("ItemGlobalAsm", LABELS_HIR_ONLY),
+
+                    // A type alias, e.g. `type Foo = Bar<u8>`
+                    HirItem::ItemTy(..) => ("ItemTy", LABELS_HIR_ONLY),
+
+                    // An enum definition, e.g. `enum Foo<A, B> {C<A>, D<B>}`
+                    HirItem::ItemEnum(..) => ("ItemEnum", LABELS_ADT),
+
+                    // A struct definition, e.g. `struct Foo<A> {x: A}`
+                    HirItem::ItemStruct(..) => ("ItemStruct", LABELS_ADT),
+
+                    // A union definition, e.g. `union Foo<A, B> {x: A, y: B}`
+                    HirItem::ItemUnion(..) => ("ItemUnion", LABELS_ADT),
+
+                    // Represents a Trait Declaration
+                    // FIXME(michaelwoerister): trait declaration is buggy because sometimes some of
+                    // the depnodes don't exist (because they legitametely didn't need to be
+                    // calculated)
+                    //
+                    // michaelwoerister and vitiral came up with a possible solution,
+                    // to just do this before every query
+                    // ```
+                    // ::rustc::ty::maps::plumbing::force_from_dep_node(tcx, dep_node)
+                    // ```
+                    //
+                    // However, this did not seem to work effectively and more bugs were hit.
+                    // Nebie @vitiral gave up :)
+                    //
+                    //HirItem::ItemTrait(..) => ("ItemTrait", LABELS_TRAIT),
+
+                    // `impl Trait for .. {}`
+                    HirItem::ItemDefaultImpl(..) => ("ItemDefaultImpl", LABELS_IMPL),
+
+                    // An implementation, eg `impl<A> Trait for Foo { .. }`
+                    HirItem::ItemImpl(..) => ("ItemImpl", LABELS_IMPL),
+
+                    _ => self.tcx.sess.span_fatal(
+                        attr.span,
+                        &format!(
+                            "clean/dirty auto-assertions not yet defined for NodeItem.node={:?}",
+                            item.node
+                        )
+                    ),
+                }
+            },
+            HirNode::NodeTraitItem(item) => {
+                match item.node {
+                    TraitItemKind::Method(..) => ("NodeTraitItem", LABELS_FN_IN_TRAIT),
+                    TraitItemKind::Const(..) => ("NodeTraitConst", LABELS_CONST_IN_TRAIT),
+                    TraitItemKind::Type(..) => ("NodeTraitType", LABELS_CONST_IN_TRAIT),
+                }
+            },
+            HirNode::NodeImplItem(item) => {
+                match item.node {
+                    ImplItemKind::Method(..) => ("NodeImplItem", LABELS_FN_IN_IMPL),
+                    ImplItemKind::Const(..) => ("NodeImplConst", LABELS_CONST_IN_IMPL),
+                    ImplItemKind::Type(..) => ("NodeImplType", LABELS_CONST_IN_IMPL),
+                }
+            },
+            _ => self.tcx.sess.span_fatal(
+                attr.span,
+                &format!(
+                    "clean/dirty auto-assertions not yet defined for {:?}",
+                    node
+                )
+            ),
+        };
+        let labels = Labels::from_iter(
+            labels.iter().flat_map(|s| s.iter().map(|l| l.to_string()))
+        );
+        (name, labels)
     }

     fn resolve_labels(&self, item: &NestedMetaItem, value: &str) -> Labels {
@@ -174,22 +524,16 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
     fn check_item(&mut self, item_id: ast::NodeId, item_span: Span) {
         let def_id = self.tcx.hir.local_def_id(item_id);
         for attr in self.tcx.get_attrs(def_id).iter() {
-            if attr.check_name(ATTR_DIRTY) {
-                if check_config(self.tcx, attr) {
+            let assertion = match self.assertion_maybe(item_id, attr) {
+                Some(a) => a,
+                None => continue,
+            };
             self.checked_attrs.insert(attr.id);
-            let labels = self.labels(attr);
-            for dep_node in self.dep_nodes(&labels, def_id) {
-                self.assert_dirty(item_span, dep_node);
-            }
-                }
-            } else if attr.check_name(ATTR_CLEAN) {
-                if check_config(self.tcx, attr) {
-                    self.checked_attrs.insert(attr.id);
-                    let labels = self.labels(attr);
-                    for dep_node in self.dep_nodes(&labels, def_id) {
+            for dep_node in self.dep_nodes(&assertion.clean, def_id) {
                 self.assert_clean(item_span, dep_node);
             }
-                }
+            for dep_node in self.dep_nodes(&assertion.dirty, def_id) {
+                self.assert_dirty(item_span, dep_node);
+            }
             }
         }
     }
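The reworked `check_item` above drives both kinds of assertions from a single parsed value (`assertion.clean` / `assertion.dirty`) instead of two separate attribute branches. As a rough illustration only — the types and helper below are made up for this sketch, not rustc's actual API — this is the shape of that check:

```rust
// Illustrative sketch only: mirrors the `assertion.clean` / `assertion.dirty`
// split used above, with invented types instead of rustc internals.
#[derive(Debug)]
struct Assertion {
    clean: Vec<String>, // labels expected to be reused from the previous session
    dirty: Vec<String>, // labels expected to be recomputed
}

fn check_assertion(assertion: &Assertion, recomputed: &[&str]) -> Result<(), String> {
    for label in &assertion.clean {
        if recomputed.contains(&label.as_str()) {
            return Err(format!("`{}` should have been clean but was recomputed", label));
        }
    }
    for label in &assertion.dirty {
        if !recomputed.contains(&label.as_str()) {
            return Err(format!("`{}` should have been dirty but was reused", label));
        }
    }
    Ok(())
}

fn main() {
    let assertion = Assertion {
        clean: vec!["Hir".to_string()],
        dirty: vec!["TypeckTables".to_string()],
    };
    // Pretend the incremental session recomputed only `TypeckTables`.
    assert!(check_assertion(&assertion, &["TypeckTables"]).is_ok());
}
```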
@@ -363,21 +707,42 @@ impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
 /// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan
 /// for a `cfg="foo"` attribute and check whether we have a cfg
 /// flag called `foo`.
+///
+/// Also make sure that the `label` and `except` fields do not
+/// both exist.
 fn check_config(tcx: TyCtxt, attr: &Attribute) -> bool {
     debug!("check_config(attr={:?})", attr);
     let config = &tcx.sess.parse_sess.config;
     debug!("check_config: config={:?}", config);
+    let (mut cfg, mut except, mut label) = (None, false, false);
     for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
         if item.check_name(CFG) {
             let value = expect_associated_value(tcx, &item);
             debug!("check_config: searching for cfg {:?}", value);
-            return config.contains(&(value, None));
+            cfg = Some(config.contains(&(value, None)));
+        }
+        if item.check_name(LABEL) {
+            label = true;
+        }
+        if item.check_name(EXCEPT) {
+            except = true;
         }
     }

+    if label && except {
         tcx.sess.span_fatal(
             attr.span,
-            "no cfg attribute");
+            "must specify only one of: `label`, `except`"
+        );
+    }
+
+    match cfg {
+        None => tcx.sess.span_fatal(
+            attr.span,
+            "no cfg attribute"
+        ),
+        Some(c) => c,
+    }
 }

 fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name {
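The new `check_config` records the `cfg` value and whether `label` / `except` were seen, then rejects attributes that specify both or that omit `cfg` entirely. A minimal standalone sketch of the same validation, assuming a plain list of keys rather than real `MetaItem`s:

```rust
// Illustrative sketch only: "exactly one of `label`/`except`, and `cfg` is
// required" validation, over a flat list of attribute keys.
fn validate_keys(keys: &[&str], enabled_cfgs: &[&str], cfg_value: &str) -> Result<bool, String> {
    let has_label = keys.contains(&"label");
    let has_except = keys.contains(&"except");
    let has_cfg = keys.contains(&"cfg");

    if has_label && has_except {
        return Err("must specify only one of: `label`, `except`".to_string());
    }
    if !has_cfg {
        return Err("no cfg attribute".to_string());
    }
    // The attribute only applies when the named cfg is active in this session.
    Ok(enabled_cfgs.contains(&cfg_value))
}

fn main() {
    assert_eq!(validate_keys(&["cfg", "label"], &["cfail2"], "cfail2"), Ok(true));
    assert!(validate_keys(&["cfg", "label", "except"], &["cfail2"], "cfail2").is_err());
}
```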
@@ -117,7 +117,7 @@ fn report_format_mismatch(sess: &Session, file: &Path, message: &str) {
     debug!("read_file: {}", message);

     if sess.opts.debugging_opts.incremental_info {
-        eprintln!("incremental: ignoring cache artifact `{}`: {}",
+        println!("[incremental] ignoring cache artifact `{}`: {}",
                  file.file_name().unwrap().to_string_lossy(),
                  message);
     }
@@ -256,11 +256,12 @@ pub fn prepare_session_directory(sess: &Session,
     debug!("attempting to copy data from source: {}",
            source_directory.display());

-    let print_file_copy_stats = sess.opts.debugging_opts.incremental_info;
-
     // Try copying over all files from the source directory
-    if let Ok(allows_links) = copy_files(&session_dir, &source_directory,
-                                         print_file_copy_stats) {
+    if let Ok(allows_links) = copy_files(sess,
+                                         &session_dir,
+                                         &source_directory) {
         debug!("successfully copied data from: {}",
                source_directory.display());
@@ -390,9 +391,9 @@ pub fn delete_all_session_dir_contents(sess: &Session) -> io::Result<()> {
     Ok(())
 }

-fn copy_files(target_dir: &Path,
-              source_dir: &Path,
-              print_stats_on_success: bool)
+fn copy_files(sess: &Session,
+              target_dir: &Path,
+              source_dir: &Path)
               -> Result<bool, ()> {
     // We acquire a shared lock on the lock file of the directory, so that
     // nobody deletes it out from under us while we are reading from it.
@@ -440,9 +441,11 @@ fn copy_files(target_dir: &Path,
         }
     }

-    if print_stats_on_success {
-        eprintln!("incremental: session directory: {} files hard-linked", files_linked);
-        eprintln!("incremental: session directory: {} files copied", files_copied);
+    if sess.opts.debugging_opts.incremental_info {
+        println!("[incremental] session directory: \
+                  {} files hard-linked", files_linked);
+        println!("[incremental] session directory: \
+                  {} files copied", files_copied);
     }

     Ok(files_linked > 0 || files_copied == 0)
@@ -177,7 +177,7 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {

     if prev_commandline_args_hash != sess.opts.dep_tracking_hash() {
         if sess.opts.debugging_opts.incremental_info {
-            eprintln!("incremental: completely ignoring cache because of \
+            println!("[incremental] completely ignoring cache because of \
                       differing commandline arguments");
         }
         // We can't reuse the cache, purge it.
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use rustc::dep_graph::DepGraph;
+use rustc::dep_graph::{DepGraph, DepKind};
 use rustc::hir::def_id::DefId;
 use rustc::hir::svh::Svh;
 use rustc::ich::Fingerprint;
@@ -170,6 +170,77 @@ fn encode_dep_graph(tcx: TyCtxt,

     // Encode the graph data.
     let serialized_graph = tcx.dep_graph.serialize();

+    if tcx.sess.opts.debugging_opts.incremental_info {
+        #[derive(Clone)]
+        struct Stat {
+            kind: DepKind,
+            node_counter: u64,
+            edge_counter: u64,
+        }
+
+        let total_node_count = serialized_graph.nodes.len();
+        let total_edge_count = serialized_graph.edge_list_data.len();
+
+        let mut counts: FxHashMap<_, Stat> = FxHashMap();
+
+        for (i, &(node, _)) in serialized_graph.nodes.iter_enumerated() {
+            let stat = counts.entry(node.kind).or_insert(Stat {
+                kind: node.kind,
+                node_counter: 0,
+                edge_counter: 0,
+            });
+
+            stat.node_counter += 1;
+            let (edge_start, edge_end) = serialized_graph.edge_list_indices[i];
+            stat.edge_counter += (edge_end - edge_start) as u64;
+        }
+
+        let mut counts: Vec<_> = counts.values().cloned().collect();
+        counts.sort_by_key(|s| -(s.node_counter as i64));
+
+        let percentage_of_all_nodes: Vec<f64> = counts.iter().map(|s| {
+            (100.0 * (s.node_counter as f64)) / (total_node_count as f64)
+        }).collect();
+
+        let average_edges_per_kind: Vec<f64> = counts.iter().map(|s| {
+            (s.edge_counter as f64) / (s.node_counter as f64)
+        }).collect();
+
+        println!("[incremental]");
+        println!("[incremental] DepGraph Statistics");
+
+        const SEPARATOR: &str = "[incremental] --------------------------------\
+                                 ----------------------------------------------\
+                                 ------------";
+
+        println!("{}", SEPARATOR);
+        println!("[incremental]");
+        println!("[incremental] Total Node Count: {}", total_node_count);
+        println!("[incremental] Total Edge Count: {}", total_edge_count);
+        println!("[incremental]");
+        println!("[incremental] {:<36}| {:<17}| {:<12}| {:<17}|",
+                 "Node Kind",
+                 "Node Frequency",
+                 "Node Count",
+                 "Avg. Edge Count");
+        println!("[incremental] -------------------------------------\
+                  |------------------\
+                  |-------------\
+                  |------------------|");
+
+        for (i, stat) in counts.iter().enumerate() {
+            println!("[incremental] {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
+                     format!("{:?}", stat.kind),
+                     percentage_of_all_nodes[i],
+                     stat.node_counter,
+                     average_edges_per_kind[i]);
+        }
+
+        println!("{}", SEPARATOR);
+        println!("[incremental]");
+    }
+
     serialized_graph.encode(encoder)?;

     Ok(())
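The statistics block above buckets the serialized nodes by `DepKind`, counting nodes and outgoing edges per kind, then prints frequencies and average edge counts. The same aggregation over plain tuples, as an illustrative sketch only (not the rustc data structures):

```rust
use std::collections::HashMap;

// Illustrative sketch only: per-kind node/edge statistics like the
// `[incremental]` DepGraph table above, computed from plain data.
fn print_stats(nodes: &[(&'static str, usize)]) {
    let total_nodes = nodes.len();
    let mut counts: HashMap<&str, (u64, u64)> = HashMap::new(); // kind -> (nodes, edges)
    for &(kind, edge_count) in nodes {
        let entry = counts.entry(kind).or_insert((0, 0));
        entry.0 += 1;
        entry.1 += edge_count as u64;
    }
    let mut rows: Vec<_> = counts.into_iter().collect();
    rows.sort_by_key(|&(_, (n, _))| std::cmp::Reverse(n));
    for (kind, (n, e)) in rows {
        println!("{:<20} {:>6.1}% {:>8} {:>10.1}",
                 kind,
                 100.0 * n as f64 / total_nodes as f64,
                 n,
                 e as f64 / n as f64);
    }
}

fn main() {
    print_stats(&[("Hir", 2), ("Hir", 4), ("TypeckTables", 10)]);
}
```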
@@ -12,7 +12,6 @@ test = false
 [dependencies]
 log = "0.3"
 rustc = { path = "../librustc" }
-rustc_back = { path = "../librustc_back" }
 rustc_const_eval = { path = "../librustc_const_eval" }
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
@@ -38,7 +38,6 @@ extern crate syntax;
 extern crate rustc;
 #[macro_use]
 extern crate log;
-extern crate rustc_back;
 extern crate rustc_const_eval;
 extern crate syntax_pos;

@@ -129,7 +128,6 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
                        NonUpperCaseGlobals,
                        NonShorthandFieldPatterns,
                        UnsafeCode,
-                       UnusedMut,
                        UnusedAllocation,
                        MissingCopyImplementations,
                        UnstableFeatures,
@@ -11,105 +11,18 @@
 use rustc::hir::def_id::DefId;
 use rustc::ty;
 use rustc::ty::adjustment;
-use util::nodemap::FxHashMap;
 use lint::{LateContext, EarlyContext, LintContext, LintArray};
 use lint::{LintPass, EarlyLintPass, LateLintPass};

-use std::collections::hash_map::Entry::{Occupied, Vacant};
-
 use syntax::ast;
 use syntax::attr;
 use syntax::feature_gate::{BUILTIN_ATTRIBUTES, AttributeType};
-use syntax::symbol::keywords;
-use syntax::ptr::P;
 use syntax::print::pprust;
+use syntax::symbol::keywords;
 use syntax::util::parser;
 use syntax_pos::Span;

-use rustc_back::slice;
 use rustc::hir;
-use rustc::hir::intravisit::FnKind;
-
-declare_lint! {
-    pub UNUSED_MUT,
-    Warn,
-    "detect mut variables which don't need to be mutable"
-}
-
-#[derive(Copy, Clone)]
-pub struct UnusedMut;
-
-impl UnusedMut {
-    fn check_unused_mut_pat(&self, cx: &LateContext, pats: &[P<hir::Pat>]) {
-        // collect all mutable pattern and group their NodeIDs by their Identifier to
-        // avoid false warnings in match arms with multiple patterns
-
-        let mut mutables = FxHashMap();
-        for p in pats {
-            p.each_binding(|_, id, span, path1| {
-                let hir_id = cx.tcx.hir.node_to_hir_id(id);
-                let bm = match cx.tables.pat_binding_modes().get(hir_id) {
-                    Some(&bm) => bm,
-                    None => span_bug!(span, "missing binding mode"),
-                };
-                let name = path1.node;
-                if let ty::BindByValue(hir::MutMutable) = bm {
-                    if !name.as_str().starts_with("_") {
-                        match mutables.entry(name) {
-                            Vacant(entry) => {
-                                entry.insert(vec![id]);
-                            }
-                            Occupied(mut entry) => {
-                                entry.get_mut().push(id);
-                            }
-                        }
-                    }
-                }
-            });
-        }
-
-        let used_mutables = cx.tcx.used_mut_nodes.borrow();
-        for (_, v) in &mutables {
-            if !v.iter().any(|e| used_mutables.contains(e)) {
-                let binding_span = cx.tcx.hir.span(v[0]);
-                let mut_span = cx.tcx.sess.codemap().span_until_char(binding_span, ' ');
-                let mut err = cx.struct_span_lint(UNUSED_MUT,
-                                                  binding_span,
-                                                  "variable does not need to be mutable");
-                err.span_suggestion_short(mut_span, "remove this `mut`", "".to_owned());
-                err.emit();
-            }
-        }
-    }
-}
-
-impl LintPass for UnusedMut {
-    fn get_lints(&self) -> LintArray {
-        lint_array!(UNUSED_MUT)
-    }
-}
-
-impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedMut {
-    fn check_arm(&mut self, cx: &LateContext, a: &hir::Arm) {
-        self.check_unused_mut_pat(cx, &a.pats)
-    }
-
-    fn check_local(&mut self, cx: &LateContext, l: &hir::Local) {
-        self.check_unused_mut_pat(cx, slice::ref_slice(&l.pat));
-    }
-
-    fn check_fn(&mut self,
-                cx: &LateContext,
-                _: FnKind,
-                _: &hir::FnDecl,
-                body: &hir::Body,
-                _: Span,
-                _: ast::NodeId) {
-        for a in &body.arguments {
-            self.check_unused_mut_pat(cx, slice::ref_slice(&a.pat));
-        }
-    }
-}
-
 declare_lint! {
     pub UNUSED_MUST_USE,
@@ -18,4 +18,4 @@ rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }

 [build-dependencies]
 build_helper = { path = "../build_helper" }
-cc = "1.0"
+cc = "1.0.1"
@@ -14,7 +14,7 @@ use rustc::hir::def_id::{DefId};
 use rustc::infer::{InferCtxt};
 use rustc::ty::{self, TyCtxt, ParamEnv};
 use rustc::ty::maps::Providers;
-use rustc::mir::{AssertMessage, BasicBlock, BorrowKind, Location, Lvalue};
+use rustc::mir::{AssertMessage, BasicBlock, BorrowKind, Location, Lvalue, Local};
 use rustc::mir::{Mir, Mutability, Operand, Projection, ProjectionElem, Rvalue};
 use rustc::mir::{Statement, StatementKind, Terminator, TerminatorKind};
 use rustc::mir::transform::{MirSource};
@@ -586,7 +586,7 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
                          context: Context,
                          (lvalue, span): (&Lvalue<'gcx>, Span),
                          flow_state: &InProgress<'b, 'gcx>) {
-        let move_data = flow_state.inits.base_results.operator().move_data();
+        let move_data = self.move_data;

         // determine if this path has a non-mut owner (and thus needs checking).
         let mut l = lvalue;
@@ -611,7 +611,7 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
             }
         }

-        if let Some(mpi) = self.move_path_for_lvalue(context, move_data, lvalue) {
+        if let Some(mpi) = self.move_path_for_lvalue(lvalue) {
             if flow_state.inits.curr_state.contains(&mpi) {
                 // may already be assigned before reaching this statement;
                 // report error.
@@ -642,21 +642,107 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
         let lvalue = self.base_path(lvalue_span.0);

         let maybe_uninits = &flow_state.uninits;
-        let move_data = maybe_uninits.base_results.operator().move_data();
-        if let Some(mpi) = self.move_path_for_lvalue(context, move_data, lvalue) {
+        // Bad scenarios:
+        //
+        // 1. Move of `a.b.c`, use of `a.b.c`
+        // 2. Move of `a.b.c`, use of `a.b.c.d` (without first reinitializing `a.b.c.d`)
+        // 3. Move of `a.b.c`, use of `a` or `a.b`
+        // 4. Uninitialized `(a.b.c: &_)`, use of `*a.b.c`; note that with
+        //    partial initialization support, one might have `a.x`
+        //    initialized but not `a.b`.
+        //
+        // OK scenarios:
+        //
+        // 5. Move of `a.b.c`, use of `a.b.d`
+        // 6. Uninitialized `a.x`, initialized `a.b`, use of `a.b`
+        // 7. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b`
+        //    must have been initialized for the use to be sound.
+        // 8. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d`
+
+        // The dataflow tracks shallow prefixes distinctly (that is,
+        // field-accesses on P distinctly from P itself), in order to
+        // track substructure initialization separately from the whole
+        // structure.
+        //
+        // E.g., when looking at (*a.b.c).d, if the closest prefix for
+        // which we have a MovePath is `a.b`, then that means that the
+        // initialization state of `a.b` is all we need to inspect to
+        // know if `a.b.c` is valid (and from that we infer that the
+        // dereference and `.d` access is also valid, since we assume
+        // `a.b.c` is assigned a reference to a initialized and
+        // well-formed record structure.)

+        // Therefore, if we seek out the *closest* prefix for which we
+        // have a MovePath, that should capture the initialization
+        // state for the lvalue scenario.
+        //
+        // This code covers scenarios 1, 2, and 4.

+        debug!("check_if_path_is_moved part1 lvalue: {:?}", lvalue);
+        match self.move_path_closest_to(lvalue) {
+            Ok(mpi) => {
                 if maybe_uninits.curr_state.contains(&mpi) {
-                    // find and report move(s) that could cause this to be uninitialized
                     self.report_use_of_moved(context, desired_action, lvalue_span);
-                } else {
-                    // sanity check: initialized on *some* path, right?
-                    assert!(flow_state.inits.curr_state.contains(&mpi));
+                    return; // don't bother finding other problems.
+                }
+            }
+            Err(NoMovePathFound::ReachedStatic) => {
+                // Okay: we do not build MoveData for static variables
+            }

+            // Only query longest prefix with a MovePath, not further
+            // ancestors; dataflow recurs on children when parents
+            // move (to support partial (re)inits).
+            //
+            // (I.e. querying parents breaks scenario 8; but may want
+            // to do such a query based on partial-init feature-gate.)
+        }

+        // A move of any shallow suffix of `lvalue` also interferes
+        // with an attempt to use `lvalue`. This is scenario 3 above.
+        //
+        // (Distinct from handling of scenarios 1+2+4 above because
+        // `lvalue` does not interfere with suffixes of its prefixes,
+        // e.g. `a.b.c` does not interfere with `a.b.d`)

+        debug!("check_if_path_is_moved part2 lvalue: {:?}", lvalue);
+        if let Some(mpi) = self.move_path_for_lvalue(lvalue) {
+            if let Some(_) = maybe_uninits.has_any_child_of(mpi) {
+                self.report_use_of_moved(context, desired_action, lvalue_span);
+                return; // don't bother finding other problems.
             }
         }
     }

+    /// Currently MoveData does not store entries for all lvalues in
+    /// the input MIR. For example it will currently filter out
+    /// lvalues that are Copy; thus we do not track lvalues of shared
+    /// reference type. This routine will walk up an lvalue along its
+    /// prefixes, searching for a foundational lvalue that *is*
+    /// tracked in the MoveData.
+    ///
+    /// An Err result includes a tag indicated why the search failed.
+    /// Currenly this can only occur if the lvalue is built off of a
+    /// static variable, as we do not track those in the MoveData.
+    fn move_path_closest_to(&mut self, lvalue: &Lvalue<'gcx>)
+                            -> Result<MovePathIndex, NoMovePathFound>
+    {
+        let mut last_prefix = lvalue;
+        for prefix in self.prefixes(lvalue, PrefixSet::All) {
+            if let Some(mpi) = self.move_path_for_lvalue(prefix) {
+                return Ok(mpi);
+            }
+            last_prefix = prefix;
+        }
+        match *last_prefix {
+            Lvalue::Local(_) => panic!("should have move path for every Local"),
+            Lvalue::Projection(_) => panic!("PrefixSet::All meant dont stop for Projection"),
+            Lvalue::Static(_) => return Err(NoMovePathFound::ReachedStatic),
+        }
+    }

     fn move_path_for_lvalue(&mut self,
-                            _context: Context,
-                            move_data: &MoveData<'gcx>,
                             lvalue: &Lvalue<'gcx>)
                             -> Option<MovePathIndex>
     {
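As the comments above explain, `move_path_closest_to` walks the prefixes of an lvalue from longest to shortest and returns the first one the move tracker knows about (scenarios 1, 2 and 4); suffixes are handled separately via `has_any_child_of` (scenario 3). A hedged, standalone sketch of the prefix walk, with paths modelled as plain segment lists rather than MIR lvalues:

```rust
use std::collections::HashSet;

// Illustrative sketch only: find the longest tracked prefix of a path,
// in the spirit of `move_path_closest_to` above.
fn closest_tracked_prefix<'a>(
    path: &'a [&'a str],
    tracked: &HashSet<Vec<&'a str>>,
) -> Option<&'a [&'a str]> {
    // Walk prefixes from longest (the path itself) down to shortest.
    for len in (1..=path.len()).rev() {
        let prefix = &path[..len];
        if tracked.contains(&prefix.to_vec()) {
            return Some(prefix);
        }
    }
    None
}

fn main() {
    let mut tracked = HashSet::new();
    tracked.insert(vec!["a", "b"]); // only `a.b` has a move path
    // For a use of `(*a.b.c).d` the closest tracked prefix is `a.b`.
    assert_eq!(closest_tracked_prefix(&["a", "b", "c", "d"], &tracked),
               Some(&["a", "b"][..]));
}
```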
@@ -664,7 +750,7 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
         // to a direct owner of `lvalue` (which means there is nothing
         // that borrowck tracks for its analysis).

-        match move_data.rev_lookup.find(lvalue) {
+        match self.move_data.rev_lookup.find(lvalue) {
             LookupResult::Parent(_) => None,
             LookupResult::Exact(mpi) => Some(mpi),
         }
@@ -733,6 +819,11 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
     }
 }

+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+enum NoMovePathFound {
+    ReachedStatic,
+}
+
 impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx> {
     fn each_borrow_involving_path<F>(&mut self,
                                      _context: Context,
@@ -846,12 +937,19 @@ mod prefixes {

 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub(super) enum PrefixSet {
+    /// Doesn't stop until it returns the base case (a Local or
+    /// Static prefix).
     All,
+    /// Stops at any dereference.
     Shallow,
+    /// Stops at the deref of a shared reference.
     Supporting,
 }

 impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx> {
+    /// Returns an iterator over the prefixes of `lvalue`
+    /// (inclusive) from longest to smallest, potentially
+    /// terminating the iteration early based on `kind`.
     pub(super) fn prefixes<'d>(&self,
                                lvalue: &'d Lvalue<'gcx>,
                                kind: PrefixSet)
@@ -1080,49 +1178,52 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
     // End-user visible description of `lvalue`
     fn describe_lvalue(&self, lvalue: &Lvalue) -> String {
         let mut buf = String::new();
-        self.append_lvalue_to_string(lvalue, &mut buf);
+        self.append_lvalue_to_string(lvalue, &mut buf, None);
         buf
     }

     // Appends end-user visible description of `lvalue` to `buf`.
-    fn append_lvalue_to_string(&self, lvalue: &Lvalue, buf: &mut String) {
+    fn append_lvalue_to_string(&self, lvalue: &Lvalue, buf: &mut String, autoderef: Option<bool>) {
         match *lvalue {
             Lvalue::Local(local) => {
-                let local = &self.mir.local_decls[local];
-                match local.name {
-                    Some(name) => buf.push_str(&format!("{}", name)),
-                    None => buf.push_str("_"),
-                }
+                self.append_local_to_string(local, buf, "_");
             }
             Lvalue::Static(ref static_) => {
                 buf.push_str(&format!("{}", &self.tcx.item_name(static_.def_id)));
             }
             Lvalue::Projection(ref proj) => {
+                let mut autoderef = autoderef.unwrap_or(false);
                 let (prefix, suffix, index_operand) = match proj.elem {
-                    ProjectionElem::Deref =>
-                        ("(*", format!(")"), None),
+                    ProjectionElem::Deref => {
+                        if autoderef {
+                            ("", format!(""), None)
+                        } else {
+                            ("(*", format!(")"), None)
+                        }
+                    },
                     ProjectionElem::Downcast(..) =>
                         ("", format!(""), None), // (dont emit downcast info)
-                    ProjectionElem::Field(field, _ty) =>
-                        ("", format!(".{}", field.index()), None), // FIXME: report name of field
-                    ProjectionElem::Index(index) =>
-                        ("", format!(""), Some(index)),
-                    ProjectionElem::ConstantIndex { offset, min_length, from_end: true } =>
-                        ("", format!("[{} of {}]", offset, min_length), None),
-                    ProjectionElem::ConstantIndex { offset, min_length, from_end: false } =>
-                        ("", format!("[-{} of {}]", offset, min_length), None),
-                    ProjectionElem::Subslice { from, to: 0 } =>
-                        ("", format!("[{}:]", from), None),
-                    ProjectionElem::Subslice { from: 0, to } =>
-                        ("", format!("[:-{}]", to), None),
-                    ProjectionElem::Subslice { from, to } =>
-                        ("", format!("[{}:-{}]", from, to), None),
+                    ProjectionElem::Field(field, _ty) => {
+                        autoderef = true;
+                        ("", format!(".{}", self.describe_field(&proj.base, field.index())), None)
+                    },
+                    ProjectionElem::Index(index) => {
+                        autoderef = true;
+                        ("", format!(""), Some(index))
+                    },
+                    ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => {
+                        autoderef = true;
+                        // Since it isn't possible to borrow an element on a particular index and
+                        // then use another while the borrow is held, don't output indices details
+                        // to avoid confusing the end-user
+                        ("", format!("[..]"), None)
+                    },
                 };
                 buf.push_str(prefix);
-                self.append_lvalue_to_string(&proj.base, buf);
+                self.append_lvalue_to_string(&proj.base, buf, Some(autoderef));
                 if let Some(index) = index_operand {
                     buf.push_str("[");
-                    self.append_lvalue_to_string(&Lvalue::Local(index), buf);
+                    self.append_local_to_string(index, buf, "..");
                     buf.push_str("]");
                 } else {
                     buf.push_str(&suffix);
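The rewritten `append_lvalue_to_string` threads an `autoderef` flag so that derefs performed only to reach a field or index are not rendered as `(* … )` in diagnostics. A toy sketch of that rendering rule, using an invented `Place` enum rather than MIR's types:

```rust
// Illustrative sketch only: render a projection chain for diagnostics,
// suppressing `(*...)` when the deref is implied by a later field access.
enum Place {
    Base(&'static str),
    Deref(Box<Place>),
    Field(Box<Place>, &'static str),
}

fn describe(place: &Place, autoderef: bool) -> String {
    match place {
        Place::Base(name) => name.to_string(),
        Place::Deref(base) => {
            if autoderef {
                describe(base, autoderef)
            } else {
                format!("(*{})", describe(base, autoderef))
            }
        }
        // A field access implies any deref needed to reach it.
        Place::Field(base, field) => format!("{}.{}", describe(base, true), field),
    }
}

fn main() {
    let place = Place::Field(Box::new(Place::Deref(Box::new(Place::Base("a")))), "b");
    assert_eq!(describe(&place, false), "a.b"); // not "(*a).b"
}
```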
@@ -1131,6 +1232,77 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx>
         }
     }

+    // Appends end-user visible description of the `local` lvalue to `buf`. If `local` doesn't have
+    // a name, then `none_string` is appended instead
+    fn append_local_to_string(&self, local_index: Local, buf: &mut String, none_string: &str) {
+        let local = &self.mir.local_decls[local_index];
+        match local.name {
+            Some(name) => buf.push_str(&format!("{}", name)),
+            None => buf.push_str(none_string)
+        }
+    }
+
+    // End-user visible description of the `field_index`nth field of `base`
+    fn describe_field(&self, base: &Lvalue, field_index: usize) -> String {
+        match *base {
+            Lvalue::Local(local) => {
+                let local = &self.mir.local_decls[local];
+                self.describe_field_from_ty(&local.ty, field_index)
+            },
+            Lvalue::Static(ref static_) => {
+                self.describe_field_from_ty(&static_.ty, field_index)
+            },
+            Lvalue::Projection(ref proj) => {
+                match proj.elem {
+                    ProjectionElem::Deref =>
+                        self.describe_field(&proj.base, field_index),
+                    ProjectionElem::Downcast(def, variant_index) =>
+                        format!("{}", def.variants[variant_index].fields[field_index].name),
+                    ProjectionElem::Field(_, field_type) =>
+                        self.describe_field_from_ty(&field_type, field_index),
+                    ProjectionElem::Index(..)
+                    | ProjectionElem::ConstantIndex { .. }
+                    | ProjectionElem::Subslice { .. } =>
+                        format!("{}", self.describe_field(&proj.base, field_index)),
+                }
+            }
+        }
+    }
+
+    // End-user visible description of the `field_index`nth field of `ty`
+    fn describe_field_from_ty(&self, ty: &ty::Ty, field_index: usize) -> String {
+        if ty.is_box() {
+            // If the type is a box, the field is described from the boxed type
+            self.describe_field_from_ty(&ty.boxed_ty(), field_index)
+        }
+        else {
+            match ty.sty {
+                ty::TyAdt(def, _) => {
+                    if def.is_enum() {
+                        format!("{}", field_index)
+                    }
+                    else {
+                        format!("{}", def.struct_variant().fields[field_index].name)
+                    }
+                },
+                ty::TyTuple(_, _) => {
+                    format!("{}", field_index)
+                },
+                ty::TyRef(_, tnm) | ty::TyRawPtr(tnm) => {
+                    self.describe_field_from_ty(&tnm.ty, field_index)
+                },
+                ty::TyArray(ty, _) | ty::TySlice(ty) => {
+                    self.describe_field_from_ty(&ty, field_index)
+                }
+                _ => {
+                    // Might need a revision when the fields in trait RFC is implemented
+                    // (https://github.com/rust-lang/rfcs/pull/1546)
+                    bug!("End-user description not implemented for field access on `{:?}`", ty.sty);
+                }
+            }
+        }
+    }
+
     // Retrieve span of given borrow from the current MIR representation
     fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
         self.mir.source_info(borrow.location).span
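`describe_field_from_ty` above names the n-th field of a type for diagnostics, looking through boxes, references and arrays before falling back to positional names for enums and tuples. A simplified sketch of the same idea, using a hypothetical three-variant `Ty` instead of rustc's type representation:

```rust
// Illustrative sketch only: name the `field_index`th field of a type,
// looking through wrapper types first, as `describe_field_from_ty` does.
enum Ty {
    Struct(Vec<&'static str>), // named fields
    Tuple(usize),              // positional fields
    Ref(Box<Ty>),              // &T / Box<T>: describe the pointee's field
}

fn describe_field(ty: &Ty, field_index: usize) -> String {
    match ty {
        Ty::Struct(fields) => fields[field_index].to_string(),
        Ty::Tuple(_) => field_index.to_string(),
        Ty::Ref(inner) => describe_field(inner, field_index),
    }
}

fn main() {
    let ty = Ty::Ref(Box::new(Ty::Struct(vec!["x", "y"])));
    assert_eq!(describe_field(&ty, 1), "y");
}
```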
@@ -1266,6 +1438,35 @@ impl<'b, 'tcx: 'b> InProgress<'b, 'tcx> {
     }
 }

+impl<'b, 'tcx> FlowInProgress<MaybeUninitializedLvals<'b, 'tcx>> {
+    fn has_any_child_of(&self, mpi: MovePathIndex) -> Option<MovePathIndex> {
+        let move_data = self.base_results.operator().move_data();
+
+        let mut todo = vec![mpi];
+        let mut push_siblings = false; // don't look at siblings of original `mpi`.
+        while let Some(mpi) = todo.pop() {
+            if self.curr_state.contains(&mpi) {
+                return Some(mpi);
+            }
+            let move_path = &move_data.move_paths[mpi];
+            if let Some(child) = move_path.first_child {
+                todo.push(child);
+            }
+            if push_siblings {
+                if let Some(sibling) = move_path.next_sibling {
+                    todo.push(sibling);
+                }
+            } else {
+                // after we've processed the original `mpi`, we should
+                // always traverse the siblings of any of its
+                // children.
+                push_siblings = true;
+            }
+        }
+        return None;
+    }
+}
+
 impl<BD> FlowInProgress<BD> where BD: BitDenotation {
     fn each_state_bit<F>(&self, f: F) where F: FnMut(BD::Idx) {
         self.curr_state.each_bit(self.base_results.operator().bits_per_block(), f)
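`has_any_child_of` walks the move-path tree, which is stored in first-child/next-sibling form, and reports the first maybe-uninitialized descendant while skipping the original node's own siblings. An illustrative sketch of that traversal over a small index-based arena (the node layout here is invented for the example):

```rust
// Illustrative sketch only: first-child / next-sibling traversal like
// `has_any_child_of` above, over a small arena of nodes.
struct Node {
    first_child: Option<usize>,
    next_sibling: Option<usize>,
}

fn any_descendant_matches(nodes: &[Node], root: usize,
                          matches: &dyn Fn(usize) -> bool) -> Option<usize> {
    let mut todo = vec![root];
    let mut push_siblings = false; // don't look at siblings of the original node
    while let Some(i) = todo.pop() {
        if matches(i) {
            return Some(i);
        }
        if let Some(child) = nodes[i].first_child {
            todo.push(child);
        }
        if push_siblings {
            if let Some(sibling) = nodes[i].next_sibling {
                todo.push(sibling);
            }
        } else {
            // After the root itself, siblings of visited children are fair game.
            push_siblings = true;
        }
    }
    None
}

fn main() {
    // Node 0 has child 1; node 1 has sibling 2.
    let nodes = vec![
        Node { first_child: Some(1), next_sibling: None },
        Node { first_child: None, next_sibling: Some(2) },
        Node { first_child: None, next_sibling: None },
    ];
    assert_eq!(any_descendant_matches(&nodes, 0, &|i| i == 2), Some(2));
}
```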
@@ -21,7 +21,7 @@ use rustc::mir::*;
 use rustc::hir;
 use hair::*;
 use syntax::ast::{Name, NodeId};
-use syntax_pos::Span;
+use syntax_pos::{DUMMY_SP, Span};

 // helper functions, broken out by category:
 mod simplify;
@@ -398,10 +398,12 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             candidates.iter().take_while(|c| c.match_pairs.is_empty()).count();
         debug!("match_candidates: {:?} candidates fully matched", fully_matched);
         let mut unmatched_candidates = candidates.split_off(fully_matched);
-        for candidate in candidates {
+        for (index, candidate) in candidates.into_iter().enumerate() {
             // If so, apply any bindings, test the guard (if any), and
             // branch to the arm.
-            if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks, candidate) {
+            let is_last = index == fully_matched - 1;
+            if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks,
+                                                                   candidate, is_last) {
                 block = b;
             } else {
                 // if None is returned, then any remaining candidates
@@ -664,7 +666,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     fn bind_and_guard_matched_candidate<'pat>(&mut self,
                                               mut block: BasicBlock,
                                               arm_blocks: &mut ArmBlocks,
-                                              candidate: Candidate<'pat, 'tcx>)
+                                              candidate: Candidate<'pat, 'tcx>,
+                                              is_last_arm: bool)
                                               -> Option<BasicBlock> {
         debug!("bind_and_guard_matched_candidate(block={:?}, candidate={:?})",
                block, candidate);
@@ -685,6 +688,22 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             self.cfg.terminate(block, source_info,
                                TerminatorKind::if_(self.hir.tcx(), cond, arm_block, otherwise));
             Some(otherwise)
+        } else if !is_last_arm {
+            // Add always true guard in case of more than one arm
+            // it creates false edges and allow MIR borrowck detects errors
+            // FIXME(#45184) -- permit "false edges"
+            let source_info = self.source_info(candidate.span);
+            let true_expr = Expr {
+                temp_lifetime: None,
+                ty: self.hir.tcx().types.bool,
+                span: DUMMY_SP,
+                kind: ExprKind::Literal{literal: self.hir.true_literal()},
+            };
+            let cond = unpack!(block = self.as_local_operand(block, true_expr));
+            let otherwise = self.cfg.start_new_block();
+            self.cfg.terminate(block, source_info,
+                               TerminatorKind::if_(self.hir.tcx(), cond, arm_block, otherwise));
+            Some(otherwise)
         } else {
             let source_info = self.source_info(candidate.span);
             self.cfg.terminate(block, source_info,
Some files were not shown because too many files have changed in this diff.