Merge remote-tracking branch 'origin/master' into proc_macro_api
commit fd95db25b3
605 changed files with 6469 additions and 3681 deletions
.gitignore (vendored): 1 changed line

@@ -51,6 +51,7 @@
 .hg/
 .hgignore
 .idea
+*.iml
 __pycache__/
 *.py[cod]
 *$py.class
.travis.yml: 14 changed lines

@@ -171,16 +171,22 @@ before_script:
 if [[ "$SKIP_BUILD" == true ]]; then
 export RUN_SCRIPT="echo 'skipping, not a full build'";
 else
-RUN_SCRIPT="stamp src/ci/init_repo.sh . $HOME/rustsrc";
+RUN_SCRIPT="src/ci/init_repo.sh . $HOME/rustsrc";
 if [ "$TRAVIS_OS_NAME" = "osx" ]; then
-export RUN_SCRIPT="$RUN_SCRIPT && stamp src/ci/run.sh";
+export RUN_SCRIPT="$RUN_SCRIPT && src/ci/run.sh";
 else
-export RUN_SCRIPT="$RUN_SCRIPT && stamp src/ci/docker/run.sh $IMAGE";
+export RUN_SCRIPT="$RUN_SCRIPT && src/ci/docker/run.sh $IMAGE";
 fi
 fi

+# Log time information from this machine and an external machine for insight into possible
+# clock drift. Timezones don't matter since relative deltas give all the necessary info.
 script:
-- sh -x -c "$RUN_SCRIPT"
+- >
+  date && curl -s --head https://google.com | grep ^Date: | sed 's/Date: //g'
+- stamp sh -x -c "$RUN_SCRIPT"
+- >
+  date && curl -s --head https://google.com | grep ^Date: | sed 's/Date: //g'

 after_success:
 - >
@@ -40,8 +40,9 @@ Read ["Installation"] from [The Book].

 > ***Note:*** Install locations can be adjusted by copying the config file
 > from `./src/bootstrap/config.toml.example` to `./config.toml`, and
-> adjusting the `prefix` option under `[install]`. Various other options are
-> also supported, and are documented in the config file.
+> adjusting the `prefix` option under `[install]`. Various other options, such
+> as enabling debug information, are also supported, and are documented in
+> the config file.

 When complete, `sudo ./x.py install` will place several programs into
 `/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
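To make the note above concrete, here is a minimal sketch of a `./config.toml` that adjusts the install prefix. Only the `[install]` table and its `prefix` key come from the note itself; the `[rust]` table and `debuginfo` key are illustrative assumptions, so consult `./src/bootstrap/config.toml.example` for the authoritative option names.

```toml
# Hypothetical ./config.toml sketch; only [install].prefix comes from the note above.
[install]
# Where `./x.py install` places rustc, rustdoc, and the standard library.
prefix = "/usr/local"

[rust]
# Assumed example of one of the "various other options": build with debug information.
debuginfo = true
```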
src/Cargo.lock (generated): 160 changed lines

@@ -129,7 +129,7 @@ dependencies = [
 "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)",
 "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
-"num_cpus 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -157,7 +157,7 @@ dependencies = [
 [[package]]
 name = "cargo"
 version = "0.21.0"
-source = "git+https://github.com/rust-lang/cargo#50b1c24d146fa072db71f12005deed319ac5ba9a"
+source = "git+https://github.com/rust-lang/cargo#eb6cf012a6cc23c9c89c4009564de9fccc38b9cb"
 replace = "cargo 0.21.0"

 [[package]]
@@ -170,8 +170,8 @@ dependencies = [
 "cargotest 0.1.0",
 "crates-io 0.10.0",
 "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"docopt 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
+"docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -188,7 +188,7 @@ dependencies = [
 "libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"num_cpus 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -201,8 +201,8 @@ dependencies = [
 "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
 "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "termcolor 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -224,7 +224,7 @@ dependencies = [
 "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
 "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -301,12 +301,12 @@ dependencies = [
 name = "crates-io"
 version = "0.10.0"
 dependencies = [
-"curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -316,19 +316,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "curl"
-version = "0.4.6"
+version = "0.4.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"curl-sys 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl-sys 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
+"socket2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
 name = "curl-sys"
-version = "0.3.12"
+version = "0.3.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -365,7 +366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "docopt"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -474,7 +475,7 @@ dependencies = [
 "libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "openssl-sys 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -482,10 +483,10 @@ name = "git2-curl"
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -575,14 +576,14 @@ dependencies = [

 [[package]]
 name = "languageserver-types"
-version = "0.10.0"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -609,7 +610,7 @@ version = "0.6.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
-"curl-sys 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+"curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
 "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
 "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -798,7 +799,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "num_cpus"
-version = "1.5.1"
+version = "1.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -936,11 +937,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "racer"
-version = "2.0.8"
+version = "2.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "clap 2.24.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -984,7 +986,7 @@ dependencies = [
 "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -1013,39 +1015,30 @@ dependencies = [
 "cargo 0.21.0 (git+https://github.com/rust-lang/cargo)",
 "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"languageserver-types 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"racer 2.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"rls-analysis 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"rls-data 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"racer 2.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"rls-analysis 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"rls-data 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"rls-vfs 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"rustfmt-nightly 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"rls-vfs 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"rustfmt-nightly 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
 name = "rls-analysis"
-version = "0.3.2"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"rls-data 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rls-data"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
+"rls-data 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1071,10 +1064,10 @@ dependencies = [

 [[package]]
 name = "rls-vfs"
-version = "0.4.2"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"racer 2.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"racer 2.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -1396,6 +1389,7 @@ dependencies = [
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc 0.0.0",
+"rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc_back 0.0.0",
 "rustc_bitflags 0.0.0",
 "rustc_const_math 0.0.0",
@@ -1463,7 +1457,7 @@ dependencies = [

 [[package]]
 name = "rustfmt-nightly"
-version = "0.1.3"
+version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1477,8 +1471,8 @@ dependencies = [
 "serde_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "strings 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1579,6 +1573,18 @@ name = "shell-escape"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "socket2"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"cfg-if 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "stable_deref_trait"
 version = "1.0.0"
@@ -1697,7 +1703,7 @@ dependencies = [
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -1720,7 +1726,7 @@ dependencies = [
 "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -1748,7 +1754,7 @@ version = "0.0.0"

 [[package]]
 name = "term"
-version = "0.4.5"
+version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1790,15 +1796,6 @@ dependencies = [
 "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

-[[package]]
-name = "thread-id"
-version = "3.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "thread_local"
 version = "0.2.7"
@@ -1809,11 +1806,11 @@ dependencies = [

 [[package]]
 name = "thread_local"
-version = "0.3.3"
+version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -1846,7 +1843,7 @@ dependencies = [

 [[package]]
 name = "toml"
-version = "0.4.1"
+version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1892,7 +1889,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "unreachable"
-version = "0.1.1"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1907,7 +1904,7 @@ dependencies = [

 [[package]]
 name = "url"
-version = "1.5.0"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1921,7 +1918,7 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "serde 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -2026,12 +2023,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum clap 2.24.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6b8f69e518f967224e628896b54e41ff6acfb4dcfefc5076325c36525dac900f"
 "checksum cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ebbb35d3dc9cd09497168f33de1acb79b265d350ab0ac34133b98f8509af1f"
 "checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
-"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b"
-"checksum curl-sys 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "f00c8ba847fb0730c293069b4d1203dc01bf3c2e1f90b4e55f426ed8f4a1eeac"
+"checksum curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6689276ab61f97c660669a5ecc117c36875dfc1ba301c986b16c653415bdf9d7"
+"checksum curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d5481162dc4f424d088581db2f979fa7d4c238fe9794595de61d8d7522e277de"
 "checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850"
 "checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e"
 "checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472"
-"checksum docopt 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63e408eee8a772c5c61f62353992e3ebf51ef5c832dd04d986b3dc7d48c5b440"
+"checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a"
 "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
 "checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
 "checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
@@ -2053,7 +2050,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
 "checksum jobserver 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4e28adc987f6d0521ef66ad60b055968107b164b3bb3cf3dc8474e0a380474a6"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b"
+"checksum languageserver-types 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c178b944c3187527293fb9f8a0b0db3c5fb62eb127cacd65296f651a2440f5b1"
 "checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
 "checksum libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)" = "38f5c2b18a287cf78b4097db62e20f43cace381dc76ae5c0a3073067f78b7ddc"
 "checksum libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "df18a822100352d9863b302faf6f8f25c0e77f0e60feb40e5dbe1238b7f13b1d"
@@ -2075,7 +2072,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e"
 "checksum num-rational 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)" = "33c881e104a26e1accc09449374c095ff2312c8e0c27fab7bbefe16eac7c776d"
 "checksum num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "1708c0628602a98b52fad936cf3edb9a107af06e52e49fdf0707e884456a6af6"
-"checksum num_cpus 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e416ba127a4bb3ff398cb19546a8d0414f73352efe2857f4060d36f5fe5983a"
+"checksum num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aec53c34f2d0247c5ca5d32cca1478762f301740468ee9ee6dcb7a0dd7a0c584"
 "checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
 "checksum openssl 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)" = "11ba043cb65fc9af71a431b8a36ffe8686cd4751cdf70a473ec1d01066ac7e41"
 "checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf"
@@ -2090,20 +2087,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469"
 "checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504"
 "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
-"checksum racer 2.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "edf2dfc188373ef96168bec3646a0415c5c21111c6144c0c36104fc720587ecd"
+"checksum racer 2.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9079a128fdb6f0c8850010e1478b215d4c00134654bf995bfda41824951ce9bd"
 "checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
 "checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
 "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
 "checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
 "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
-"checksum rls-analysis 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8d77d58e8933752142b5b92e3f8ba6d6f1630be6da5627c492268a43f79ffbda"
-"checksum rls-data 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "374a8fad31cc0681a7bfd8a04079dd4afd0e981d34e18a171b1a467445bdf51e"
+"checksum rls-analysis 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "78a05b130793ebc781c2d933299d7214a10d014fdebe5184eb652c81ba8d3184"
 "checksum rls-data 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e502ac679bc35e023e982506c32d0278ef89e29af1e4ad21cb70c44b525b87a9"
 "checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
-"checksum rls-vfs 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ace07060dd154731b39254864245cbdd33c8f5f64fe1f630a089c72e2468f854"
+"checksum rls-vfs 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1f19246a0fda45f2fb6eb34addef2a692c044cbf1c90ec7695583450fb5f23e7"
 "checksum rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3058a43ada2c2d0b92b3ae38007a2d0fa5e9db971be260e0171408a4ff471c95"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
-"checksum rustfmt-nightly 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "277deb9c0ee9c4788ee94faef5988fa334179cd7363bf281a2cae027edbbc8bf"
+"checksum rustfmt-nightly 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "31ac6fe40a9844ee2de3d51d0be2bbcdb361bad6f3667a02db8c4e2330afbbb5"
 "checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
 "checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d"
 "checksum semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd61b85a0fa777f7fb7c454b9189b2941b110d1385ce84d7f76efdf1606a85"
@@ -2116,6 +2112,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8bcf487be7d2e15d3d543f04312de991d631cfe1b43ea0ade69e6a8a5b16a1"
 "checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b"
 "checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8"
+"checksum socket2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "12cdbddbaa27bf94cc194b8e37f5811db6fe83cea96cf99cf1f8e92b65a41371"
 "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
 "checksum strings 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "da75d8bf2c4d210d63dd09581a041b036001f9f6e03d9b151dbff810fb7ba26a"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
@@ -2127,17 +2124,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde"
 "checksum tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "281285b717926caa919ad905ef89c63d75805c7d89437fb873100925a53f2b1b"
 "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
-"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
+"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
 "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
 "checksum termcolor 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9a5193a56b8d82014662c4b933dea6bec851daf018a2b01722e007daaf5f9dca"
 "checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
-"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
 "checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
-"checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
+"checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14"
 "checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
 "checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
 "checksum toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bd86ad9ebee246fdedd610e0f6d0587b754a3d81438db930a244d0480ed7878f"
-"checksum toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc5dbfb20a481e64b99eb7ae280859ec76730c7191570ba5edaa962394edb0a"
+"checksum toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b0601da6c97135c8d330c7a13a013ca6cd4143221b01de2f8d4edc50a9e551c7"
 "checksum typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5934776c3ac1bea4a9d56620d6bf2d483b20d394e49581db40f187e1118ff667"
 "checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
 "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
@@ -2145,8 +2141,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
 "checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
 "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
-"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
-"checksum url 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a69a2e36a5e5ed3f3063c8c64a3b028c4d50d689fa6c862abd7cfe65f882595c"
+"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
+"checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
 "checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
 "checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
 "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
@@ -75,16 +75,11 @@ fn main() {
         Err(_) => 0,
     };
 
-    // Build scripts always use the snapshot compiler which is guaranteed to be
-    // able to produce an executable, whereas intermediate compilers may not
-    // have the standard library built yet and may not be able to produce an
-    // executable. Otherwise we just use the standard compiler we're
-    // bootstrapping with.
-    //
-    // Also note that cargo will detect the version of the compiler to trigger
-    // a rebuild when the compiler changes. If this happens, we want to make
-    // sure to use the actual compiler instead of the snapshot compiler becase
-    // that's the one that's actually changing.
+    // Use a different compiler for build scripts, since there may not yet be a
+    // libstd for the real compiler to use. However, if Cargo is attempting to
+    // determine the version of the compiler, the real compiler needs to be
+    // used. Currently, these two states are differentiated based on whether
+    // --target and -vV is/isn't passed.
     let (rustc, libdir) = if target.is_none() && version.is_none() {
         ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
     } else {

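Note: the rewritten comment above describes how the rustc shim picks a compiler: build-script invocations (no --target, no -vV) get the snapshot compiler because libstd for the freshly built compiler may not exist yet, while Cargo's version probe needs the real one. A minimal Rust sketch of that selection, assuming the build system has exported the RUSTC_SNAPSHOT / RUSTC_SNAPSHOT_LIBDIR pair shown in the hunk; the RUSTC_REAL / RUSTC_LIBDIR names for the other pair are illustrative here, not taken from the diff:

    use std::env;

    fn main() {
        let args: Vec<String> = env::args().skip(1).collect();
        // Cargo passes --target for real compilations and -vV when probing the version.
        let has_target = args.iter().any(|a| a == "--target");
        let wants_version = args.iter().any(|a| a == "-vV");

        // Neither flag present: this is a build script, so use the snapshot compiler.
        let (rustc_var, libdir_var) = if !has_target && !wants_version {
            ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
        } else {
            ("RUSTC_REAL", "RUSTC_LIBDIR") // illustrative names for the non-snapshot pair
        };
        let rustc = env::var_os(rustc_var).expect("compiler path not exported by the build system");
        let libdir = env::var_os(libdir_var).expect("libdir not exported by the build system");
        println!("would exec {:?} with library path {:?}", rustc, libdir);
    }
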
@@ -25,10 +25,11 @@ from time import time
 
 
 def get(url, path, verbose=False):
-    sha_url = url + ".sha256"
+    suffix = '.sha256'
+    sha_url = url + suffix
     with tempfile.NamedTemporaryFile(delete=False) as temp_file:
         temp_path = temp_file.name
-    with tempfile.NamedTemporaryFile(suffix=".sha256", delete=False) as sha_file:
+    with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as sha_file:
         sha_path = sha_file.name
 
     try:
@@ -55,6 +56,7 @@ def get(url, path, verbose=False):
 
 
 def delete_if_present(path, verbose):
+    """Remove the given file if present"""
     if os.path.isfile(path):
         if verbose:
             print("removing " + path)
@@ -92,12 +94,13 @@ def _download(path, url, probably_big, verbose, exception):
 
 
 def verify(path, sha_path, verbose):
+    """Check if the sha256 sum of the given path is valid"""
     if verbose:
         print("verifying " + path)
-    with open(path, "rb") as f:
-        found = hashlib.sha256(f.read()).hexdigest()
-    with open(sha_path, "r") as f:
-        expected = f.readline().split()[0]
+    with open(path, "rb") as source:
+        found = hashlib.sha256(source.read()).hexdigest()
+    with open(sha_path, "r") as sha256sum:
+        expected = sha256sum.readline().split()[0]
     verified = found == expected
     if not verified:
         print("invalid checksum:\n"
@@ -107,6 +110,7 @@ def verify(path, sha_path, verbose):
 
 
 def unpack(tarball, dst, verbose=False, match=None):
+    """Unpack the given tarball file"""
     print("extracting " + tarball)
     fname = os.path.basename(tarball).replace(".tar.gz", "")
     with contextlib.closing(tarfile.open(tarball)) as tar:
@@ -128,6 +132,7 @@ def unpack(tarball, dst, verbose=False, match=None):
             shutil.move(tp, fp)
     shutil.rmtree(os.path.join(dst, fname))
 
+
 def run(args, verbose=False, exception=False, **kwargs):
     if verbose:
         print("running: " + ' '.join(args))
@@ -245,7 +250,8 @@ class RustBuild(object):
             return
 
         # At this point we're pretty sure the user is running NixOS
-        print("info: you seem to be running NixOS. Attempting to patch " + fname)
+        nix_os_msg = "info: you seem to be running NixOS. Attempting to patch"
+        print(nix_os_msg, fname)
 
         try:
             interpreter = subprocess.check_output(
@@ -293,18 +299,22 @@ class RustBuild(object):
         return self._cargo_channel
 
     def rustc_stamp(self):
+        """Return the path for .rustc-stamp"""
         return os.path.join(self.bin_root(), '.rustc-stamp')
 
     def cargo_stamp(self):
+        """Return the path for .cargo-stamp"""
         return os.path.join(self.bin_root(), '.cargo-stamp')
 
     def rustc_out_of_date(self):
+        """Check if rustc is out of date"""
         if not os.path.exists(self.rustc_stamp()) or self.clean:
             return True
         with open(self.rustc_stamp(), 'r') as f:
             return self.stage0_date() != f.read()
 
     def cargo_out_of_date(self):
+        """Check if cargo is out of date"""
         if not os.path.exists(self.cargo_stamp()) or self.clean:
             return True
         with open(self.cargo_stamp(), 'r') as f:
@@ -357,8 +367,7 @@ class RustBuild(object):
     def exe_suffix(self):
         if sys.platform == 'win32':
             return '.exe'
-        else:
-            return ''
+        return ''
 
     def print_what_it_means_to_bootstrap(self):
         if hasattr(self, 'printed'):
@@ -366,7 +375,7 @@ class RustBuild(object):
         self.printed = True
         if os.path.exists(self.bootstrap_binary()):
             return
-        if not '--help' in sys.argv or len(sys.argv) == 1:
+        if '--help' not in sys.argv or len(sys.argv) == 1:
             return
 
         print('info: the build system for Rust is written in Rust, so this')
@@ -461,8 +470,8 @@ class RustBuild(object):
            # always emit 'i386' on x86/amd64 systems). As such, isainfo -k
            # must be used instead.
            try:
-                cputype = subprocess.check_output(['isainfo',
-                    '-k']).strip().decode(default_encoding)
+                cputype = subprocess.check_output(
+                    ['isainfo', '-k']).strip().decode(default_encoding)
            except (subprocess.CalledProcessError, OSError):
                err = "isainfo not found"
                if self.verbose:
@@ -562,21 +571,26 @@ class RustBuild(object):
         default_encoding = sys.getdefaultencoding()
         run(["git", "submodule", "-q", "sync"], cwd=self.rust_root)
         submodules = [s.split(' ', 1)[1] for s in subprocess.check_output(
-            ["git", "config", "--file", os.path.join(self.rust_root, ".gitmodules"),
+            ["git", "config", "--file",
+             os.path.join(self.rust_root, ".gitmodules"),
              "--get-regexp", "path"]
         ).decode(default_encoding).splitlines()]
         submodules = [module for module in submodules
                       if not ((module.endswith("llvm") and
-                               (self.get_toml('llvm-config') or self.get_mk('CFG_LLVM_ROOT'))) or
+                               (self.get_toml('llvm-config') or
+                                self.get_mk('CFG_LLVM_ROOT'))) or
                               (module.endswith("jemalloc") and
-                               (self.get_toml('jemalloc') or self.get_mk('CFG_JEMALLOC_ROOT'))))
-                      ]
+                               (self.get_toml('jemalloc') or
+                                self.get_mk('CFG_JEMALLOC_ROOT'))))]
         run(["git", "submodule", "update",
-             "--init"] + submodules, cwd=self.rust_root, verbose=self.verbose)
+             "--init"] + submodules,
+            cwd=self.rust_root, verbose=self.verbose)
         run(["git", "submodule", "-q", "foreach", "git",
-             "reset", "-q", "--hard"], cwd=self.rust_root, verbose=self.verbose)
+             "reset", "-q", "--hard"],
+            cwd=self.rust_root, verbose=self.verbose)
         run(["git", "submodule", "-q", "foreach", "git",
-             "clean", "-qdfx"], cwd=self.rust_root, verbose=self.verbose)
+             "clean", "-qdfx"],
+            cwd=self.rust_root, verbose=self.verbose)
 
 
 def bootstrap():
@@ -692,5 +706,6 @@ def main():
           format_build_time(time() - start_time))
     sys.exit(exit_code)
 
+
 if __name__ == '__main__':
     main()

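Note: several of the hunks above add docstrings around the .rustc-stamp / .cargo-stamp bookkeeping: the stamp records the stage0 date the cached binaries came from, and a mismatch (or a missing stamp) forces a re-download. A small Rust sketch of the same stamp check, with the directory and date invented for the example:

    use std::fs;
    use std::path::Path;

    fn rustc_out_of_date(bin_root: &Path, stage0_date: &str) -> bool {
        // The stamp holds the date of the stage0 snapshot the cached rustc came from.
        match fs::read_to_string(bin_root.join(".rustc-stamp")) {
            Ok(contents) => contents != stage0_date,
            Err(_) => true, // no stamp yet: treat the download as out of date
        }
    }

    fn main() {
        let stale = rustc_out_of_date(Path::new("build/cache/stage0"), "2017-06-15");
        println!("needs re-download: {}", stale);
    }
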
@@ -42,10 +42,13 @@ use config::Target;
 pub fn find(build: &mut Build) {
     // For all targets we're going to need a C compiler for building some shims
     // and such as well as for being a linker for Rust code.
-    for target in build.config.target.iter() {
+    //
+    // This includes targets that aren't necessarily passed on the commandline
+    // (FIXME: Perhaps it shouldn't?)
+    for target in &build.config.target {
         let mut cfg = gcc::Config::new();
         cfg.cargo_metadata(false).opt_level(0).debug(false)
-           .target(target).host(&build.config.build);
+           .target(target).host(&build.build);
 
         let config = build.config.target_config.get(target);
         if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
@@ -64,10 +67,13 @@ pub fn find(build: &mut Build) {
     }
 
     // For all host triples we need to find a C++ compiler as well
-    for host in build.config.host.iter() {
+    //
+    // This includes hosts that aren't necessarily passed on the commandline
+    // (FIXME: Perhaps it shouldn't?)
+    for host in &build.config.host {
         let mut cfg = gcc::Config::new();
         cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
-           .target(host).host(&build.config.build);
+           .target(host).host(&build.build);
         let config = build.config.target_config.get(host);
         if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
             cfg.compiler(cxx);

@@ -23,12 +23,12 @@ use build_helper::output;
 use Build;
 
 // The version number
-pub const CFG_RELEASE_NUM: &'static str = "1.20.0";
+pub const CFG_RELEASE_NUM: &str = "1.20.0";
 
 // An optional number to put after the label, e.g. '.2' -> '-beta.2'
 // Be sure to make this starts with a dot to conform to semver pre-release
 // versions (section 9)
-pub const CFG_PRERELEASE_VERSION: &'static str = ".1";
+pub const CFG_PRERELEASE_VERSION: &str = ".1";
 
 pub struct GitInfo {
     inner: Option<Info>,
@@ -99,6 +99,10 @@ impl GitInfo {
             version.push_str(&inner.commit_date);
             version.push_str(")");
         }
-        return version
+        version
+    }
+
+    pub fn is_git(&self) -> bool {
+        self.inner.is_some()
     }
 }

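Note: the hunk above drops the explicit `return` from GitInfo's version builder and adds is_git(). A hedged sketch of the version-string shape that code builds; the sha and date values are made up for the example:

    fn version(release: &str, short_sha: Option<&str>, commit_date: Option<&str>) -> String {
        let mut version = release.to_string();
        if let (Some(sha), Some(date)) = (short_sha, commit_date) {
            // e.g. "1.20.0-nightly (0123abcd4 2017-06-14)"
            version.push_str(" (");
            version.push_str(sha);
            version.push_str(" ");
            version.push_str(date);
            version.push_str(")");
        }
        version
    }

    fn main() {
        println!("{}", version("1.20.0-nightly", Some("0123abcd4"), Some("2017-06-14")));
    }
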
@@ -13,23 +13,22 @@
 //! This file implements the various regression test suites that we execute on
 //! our CI.
 
-extern crate build_helper;
-
 use std::collections::HashSet;
 use std::env;
+use std::iter;
 use std::fmt;
 use std::fs::{self, File};
 use std::path::{PathBuf, Path};
 use std::process::Command;
 use std::io::Read;
 
-use build_helper::output;
+use build_helper::{self, output};
 
 use {Build, Compiler, Mode};
 use dist;
 use util::{self, dylib_path, dylib_path_var, exe};
 
-const ADB_TEST_DIR: &'static str = "/data/tmp/work";
+const ADB_TEST_DIR: &str = "/data/tmp/work";
 
 /// The two modes of the test runner; tests or benchmarks.
 #[derive(Copy, Clone)]
@@ -60,7 +59,7 @@ impl fmt::Display for TestKind {
 }
 
 fn try_run(build: &Build, cmd: &mut Command) {
-    if build.flags.cmd.no_fail_fast() {
+    if !build.fail_fast {
         if !build.try_run(cmd) {
             let failures = build.delayed_failures.get();
             build.delayed_failures.set(failures + 1);
@@ -71,7 +70,7 @@ fn try_run(build: &Build, cmd: &mut Command) {
 }
 
 fn try_run_quiet(build: &Build, cmd: &mut Command) {
-    if build.flags.cmd.no_fail_fast() {
+    if !build.fail_fast {
         if !build.try_run_quiet(cmd) {
             let failures = build.delayed_failures.get();
             build.delayed_failures.set(failures + 1);
@@ -99,7 +98,7 @@ pub fn linkcheck(build: &Build, host: &str) {
 /// This tool in `src/tools` will check out a few Rust projects and run `cargo
 /// test` to ensure that we don't regress the test suites there.
 pub fn cargotest(build: &Build, stage: u32, host: &str) {
-    let ref compiler = Compiler::new(stage, host);
+    let compiler = Compiler::new(stage, host);
 
     // Note that this is a short, cryptic, and not scoped directory name. This
     // is currently to minimize the length of path on Windows where we otherwise
@@ -109,11 +108,11 @@ pub fn cargotest(build: &Build, stage: u32, host: &str) {
 
     let _time = util::timeit();
     let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
-    build.prepare_tool_cmd(compiler, &mut cmd);
-    try_run(build, cmd.arg(&build.cargo)
+    build.prepare_tool_cmd(&compiler, &mut cmd);
+    try_run(build, cmd.arg(&build.initial_cargo)
                    .arg(&out_dir)
-                   .env("RUSTC", build.compiler_path(compiler))
-                   .env("RUSTDOC", build.rustdoc(compiler)));
+                   .env("RUSTC", build.compiler_path(&compiler))
+                   .env("RUSTDOC", build.rustdoc(&compiler)));
 }
 
 /// Runs `cargo test` for `cargo` packaged with Rust.
@@ -124,13 +123,12 @@ pub fn cargo(build: &Build, stage: u32, host: &str) {
     // and not RUSTC because the Cargo test suite has tests that will
     // fail if rustc is not spelled `rustc`.
     let path = build.sysroot(compiler).join("bin");
-    let old_path = ::std::env::var("PATH").expect("");
-    let sep = if cfg!(windows) { ";" } else {":" };
-    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
+    let old_path = env::var_os("PATH").unwrap_or_default();
+    let newpath = env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("");
 
     let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
     cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
-    if build.flags.cmd.no_fail_fast() {
+    if !build.fail_fast {
         cargo.arg("--no-fail-fast");
     }
 
@@ -198,9 +196,9 @@ pub fn compiletest(build: &Build,
     cmd.arg("--mode").arg(mode);
     cmd.arg("--target").arg(target);
     cmd.arg("--host").arg(compiler.host);
-    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
+    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.build));
 
-    if let Some(nodejs) = build.config.nodejs.as_ref() {
+    if let Some(ref nodejs) = build.config.nodejs {
         cmd.arg("--nodejs").arg(nodejs);
     }
 
@@ -224,7 +222,7 @@ pub fn compiletest(build: &Build,
 
     cmd.arg("--docck-python").arg(build.python());
 
-    if build.config.build.ends_with("apple-darwin") {
+    if build.build.ends_with("apple-darwin") {
         // Force /usr/bin/python on macOS for LLDB tests because we're loading the
         // LLDB plugin's compiled module which only works with the system python
         // (namely not Homebrew-installed python)
@@ -251,7 +249,7 @@ pub fn compiletest(build: &Build,
 
     cmd.args(&build.flags.cmd.test_args());
 
-    if build.config.verbose() || build.flags.verbose() {
+    if build.is_verbose() {
         cmd.arg("--verbose");
     }
 
@@ -279,7 +277,7 @@ pub fn compiletest(build: &Build,
 
     if build.remote_tested(target) {
         cmd.arg("--remote-test-client")
-           .arg(build.tool(&Compiler::new(0, &build.config.build),
+           .arg(build.tool(&Compiler::new(0, &build.build),
                            "remote-test-client"));
     }
 
@@ -368,7 +366,7 @@ pub fn error_index(build: &Build, compiler: &Compiler) {
                                   "error_index_generator")
                      .arg("markdown")
                      .arg(&output)
-                     .env("CFG_BUILD", &build.config.build));
+                     .env("CFG_BUILD", &build.build));
 
     markdown_test(build, compiler, &output);
 }
@@ -450,7 +448,7 @@ pub fn krate(build: &Build,
     cargo.arg("--manifest-path")
          .arg(build.src.join(path).join("Cargo.toml"))
         .arg("--features").arg(features);
-    if test_kind.subcommand() == "test" && build.flags.cmd.no_fail_fast() {
+    if test_kind.subcommand() == "test" && !build.fail_fast {
         cargo.arg("--no-fail-fast");
     }
 
@@ -520,16 +518,14 @@ fn krate_emscripten(build: &Build,
                     compiler: &Compiler,
                     target: &str,
                     mode: Mode) {
-    let mut tests = Vec::new();
     let out_dir = build.cargo_out(compiler, mode, target);
-    find_tests(&out_dir.join("deps"), target, &mut tests);
+    let tests = find_tests(&out_dir.join("deps"), target);
 
+    let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured");
     for test in tests {
-        let test_file_name = test.to_string_lossy().into_owned();
-        println!("running {}", test_file_name);
-        let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured");
+        println!("running {}", test.display());
         let mut cmd = Command::new(nodejs);
-        cmd.arg(&test_file_name);
+        cmd.arg(&test);
         if build.config.quiet_tests {
             cmd.arg("--quiet");
         }
@@ -541,11 +537,10 @@ fn krate_remote(build: &Build,
                 compiler: &Compiler,
                 target: &str,
                 mode: Mode) {
-    let mut tests = Vec::new();
     let out_dir = build.cargo_out(compiler, mode, target);
-    find_tests(&out_dir.join("deps"), target, &mut tests);
+    let tests = find_tests(&out_dir.join("deps"), target);
 
-    let tool = build.tool(&Compiler::new(0, &build.config.build),
+    let tool = build.tool(&Compiler::new(0, &build.build),
                           "remote-test-client");
     for test in tests {
         let mut cmd = Command::new(&tool);
@@ -559,9 +554,8 @@ fn krate_remote(build: &Build,
     }
 }
 
-fn find_tests(dir: &Path,
-              target: &str,
-              dst: &mut Vec<PathBuf>) {
+fn find_tests(dir: &Path, target: &str) -> Vec<PathBuf> {
+    let mut dst = Vec::new();
     for e in t!(dir.read_dir()).map(|e| t!(e)) {
         let file_type = t!(e.file_type());
         if !file_type.is_file() {
@@ -576,6 +570,7 @@ fn find_tests(dir: &Path,
             dst.push(e.path());
         }
     }
+    dst
 }
 
 pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
@@ -590,7 +585,7 @@ pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
                          .join(exe("remote-test-server", target));
 
     // Spawn the emulator and wait for it to come online
-    let tool = build.tool(&Compiler::new(0, &build.config.build),
+    let tool = build.tool(&Compiler::new(0, &build.build),
                           "remote-test-client");
     let mut cmd = Command::new(&tool);
     cmd.arg("spawn-emulator")
@@ -616,7 +611,7 @@ pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
 
 /// Run "distcheck", a 'make check' from a tarball
 pub fn distcheck(build: &Build) {
-    if build.config.build != "x86_64-unknown-linux-gnu" {
+    if build.build != "x86_64-unknown-linux-gnu" {
         return
     }
     if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
@@ -641,7 +636,7 @@ pub fn distcheck(build: &Build) {
               .args(&build.config.configure_args)
              .arg("--enable-vendor")
              .current_dir(&dir));
-    build.run(Command::new(build_helper::make(&build.config.build))
+    build.run(Command::new(build_helper::make(&build.build))
              .arg("check")
             .current_dir(&dir));
 
@@ -659,7 +654,7 @@ pub fn distcheck(build: &Build) {
     build.run(&mut cmd);
 
     let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
-    build.run(Command::new(&build.cargo)
+    build.run(Command::new(&build.initial_cargo)
              .arg("generate-lockfile")
             .arg("--manifest-path")
            .arg(&toml)
@@ -668,13 +663,13 @@ pub fn distcheck(build: &Build) {
 
 /// Test the build system itself
 pub fn bootstrap(build: &Build) {
-    let mut cmd = Command::new(&build.cargo);
+    let mut cmd = Command::new(&build.initial_cargo);
     cmd.arg("test")
       .current_dir(build.src.join("src/bootstrap"))
      .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
     .env("RUSTC_BOOTSTRAP", "1")
-       .env("RUSTC", &build.rustc);
+       .env("RUSTC", &build.initial_rustc);
-    if build.flags.cmd.no_fail_fast() {
+    if !build.fail_fast {
         cmd.arg("--no-fail-fast");
     }
     cmd.arg("--").args(&build.flags.cmd.test_args());

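Note: the `cargo` test hunk above replaces a hand-rolled "path;PATH" / "path:PATH" join with env::join_paths, which picks the right separator per platform. A standalone sketch of that pattern; the directory being prepended is illustrative:

    use std::env;
    use std::iter;
    use std::path::PathBuf;

    fn main() {
        let extra = PathBuf::from("/tmp/sysroot/bin"); // illustrative directory
        let old_path = env::var_os("PATH").unwrap_or_default();
        // iter::once(..).chain(split_paths(..)) puts the new entry first.
        let new_path = env::join_paths(iter::once(extra).chain(env::split_paths(&old_path)))
            .expect("a PATH entry contained an invalid character");
        env::set_var("PATH", &new_path);
        println!("PATH is now {:?}", new_path);
    }
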
@@ -50,7 +50,7 @@ pub fn std(build: &Build, target: &str, compiler: &Compiler) {
     let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
     let mut features = build.std_features();
 
-    if let Ok(target) = env::var("MACOSX_STD_DEPLOYMENT_TARGET") {
+    if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
         cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
     }
 
@@ -158,7 +158,7 @@ pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &st
         return
     }
 
-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);
     let compiler_path = build.compiler_path(&compiler);
     let src_dir = &build.src.join("src/rtstartup");
     let dst_dir = &build.native_dir(target).join("rtstartup");
@@ -199,7 +199,7 @@ pub fn test(build: &Build, target: &str, compiler: &Compiler) {
     let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
     build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
     let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
-    if let Ok(target) = env::var("MACOSX_STD_DEPLOYMENT_TARGET") {
+    if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
         cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
     }
     cargo.arg("--manifest-path")
@@ -247,7 +247,7 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
     cargo.env("CFG_RELEASE", build.rust_release())
          .env("CFG_RELEASE_CHANNEL", &build.config.channel)
         .env("CFG_VERSION", build.rust_version())
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(PathBuf::new()));
+         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
 
     if compiler.stage == 0 {
         cargo.env("CFG_LIBDIR_RELATIVE", "lib");
@@ -276,10 +276,6 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
     if build.is_rust_llvm(target) {
         cargo.env("LLVM_RUSTLLVM", "1");
     }
-    if let Some(ref cfg_file) = build.flags.config {
-        let cfg_path = t!(PathBuf::from(cfg_file).canonicalize());
-        cargo.env("CFG_LLVM_TOML", cfg_path.into_os_string());
-    }
     cargo.env("LLVM_CONFIG", build.llvm_config(target));
     let target_config = build.config.target_config.get(target);
     if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
@@ -355,7 +351,7 @@ pub fn create_sysroot(build: &Build, compiler: &Compiler) {
 /// Prepare a new compiler from the artifacts in `stage`
 ///
 /// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.config.build
+/// must have been previously produced by the `stage - 1` build.build
 /// compiler.
 pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
     // nothing to do in stage0
@@ -369,7 +365,7 @@ pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
     let target_compiler = Compiler::new(stage, host);
 
     // The compiler that compiled the compiler we're assembling
-    let build_compiler = Compiler::new(stage - 1, &build.config.build);
+    let build_compiler = Compiler::new(stage - 1, &build.build);
 
     // Link in all dylibs to the libdir
     let sysroot = build.sysroot(&target_compiler);
@@ -389,7 +385,7 @@ pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
     let rustc = out_dir.join(exe("rustc", host));
     let bindir = sysroot.join("bin");
     t!(fs::create_dir_all(&bindir));
-    let compiler = build.compiler_path(&Compiler::new(stage, host));
+    let compiler = build.compiler_path(&target_compiler);
     let _ = fs::remove_file(&compiler);
     copy(&rustc, &compiler);
 
@@ -411,6 +407,8 @@ fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
     t!(fs::create_dir_all(&sysroot_dst));
     let mut contents = Vec::new();
     t!(t!(File::open(stamp)).read_to_end(&mut contents));
+    // This is the method we use for extracting paths from the stamp file passed to us. See
+    // run_cargo for more information (in this file).
     for part in contents.split(|b| *b == 0) {
         if part.is_empty() {
             continue
@@ -425,7 +423,7 @@ fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
 /// This will build the specified tool with the specified `host` compiler in
 /// `stage` into the normal cargo output directory.
 pub fn maybe_clean_tools(build: &Build, stage: u32, target: &str, mode: Mode) {
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);
 
     let stamp = match mode {
         Mode::Libstd => libstd_stamp(build, &compiler, target),
@@ -445,7 +443,7 @@ pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
     let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
     println!("Building stage{} tool {} ({})", stage, tool, target);
 
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);
 
     let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
     let dir = build.src.join("src/tools").join(tool);
@@ -561,23 +559,24 @@ fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) {
         // If this was an output file in the "host dir" we don't actually
         // worry about it, it's not relevant for us.
         if filename.starts_with(&host_root_dir) {
-            continue
+            continue;
+        }
 
         // If this was output in the `deps` dir then this is a precise file
         // name (hash included) so we start tracking it.
-        } else if filename.starts_with(&target_deps_dir) {
+        if filename.starts_with(&target_deps_dir) {
             deps.push(filename.to_path_buf());
+            continue;
+        }
 
         // Otherwise this was a "top level artifact" which right now doesn't
        // have a hash in the name, but there's a version of this file in
        // the `deps` folder which *does* have a hash in the name. That's
        // the one we'll want to we'll probe for it later.
-        } else {
-            toplevel.push((filename.file_stem().unwrap()
-                                   .to_str().unwrap().to_string(),
-                           filename.extension().unwrap().to_owned()
-                                   .to_str().unwrap().to_string()));
-        }
+        toplevel.push((filename.file_stem().unwrap()
+                               .to_str().unwrap().to_string(),
+                       filename.extension().unwrap().to_owned()
+                               .to_str().unwrap().to_string()));
     }
 }
 

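Note: the comment added in add_to_sysroot above says the stamp written by run_cargo is a list of output paths separated by NUL bytes. A hedged sketch of reading that format back; the stamp file name is illustrative:

    use std::fs;
    use std::path::PathBuf;

    fn main() -> std::io::Result<()> {
        let contents = fs::read("stage1-std.stamp")?; // illustrative stamp path
        // Split on the 0 separators and skip empty pieces (e.g. a trailing one).
        let paths: Vec<PathBuf> = contents
            .split(|b| *b == 0)
            .filter(|part| !part.is_empty())
            .map(|part| PathBuf::from(String::from_utf8_lossy(part).into_owned()))
            .collect();
        for p in &paths {
            println!("would copy {} into the sysroot", p.display());
        }
        Ok(())
    }
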
@@ -81,8 +81,6 @@ pub struct Config {
     pub build: String,
     pub host: Vec<String>,
     pub target: Vec<String>,
-    pub rustc: Option<PathBuf>,
-    pub cargo: Option<PathBuf>,
     pub local_rebuild: bool,
 
     // dist misc
@@ -114,11 +112,18 @@ pub struct Config {
     pub python: Option<PathBuf>,
     pub configure_args: Vec<String>,
     pub openssl_static: bool,
 
+    // These are either the stage0 downloaded binaries or the locally installed ones.
+    pub initial_cargo: PathBuf,
+    pub initial_rustc: PathBuf,
 }
 
 /// Per-target configuration stored in the global configuration structure.
 #[derive(Default)]
 pub struct Target {
+    /// Some(path to llvm-config) if using an external LLVM.
     pub llvm_config: Option<PathBuf>,
     pub jemalloc: Option<PathBuf>,
     pub cc: Option<PathBuf>,
@@ -307,8 +312,6 @@ impl Config {
                 config.target.push(target.clone());
             }
         }
-        config.rustc = build.rustc.map(PathBuf::from);
-        config.cargo = build.cargo.map(PathBuf::from);
         config.nodejs = build.nodejs.map(PathBuf::from);
         config.gdb = build.gdb.map(PathBuf::from);
         config.python = build.python.map(PathBuf::from);
@@ -410,13 +413,25 @@ impl Config {
             set(&mut config.rust_dist_src, t.src_tarball);
         }
 
+        let cwd = t!(env::current_dir());
+        let out = cwd.join("build");
+
+        let stage0_root = out.join(&config.build).join("stage0/bin");
+        config.initial_rustc = match build.rustc {
+            Some(s) => PathBuf::from(s),
+            None => stage0_root.join(exe("rustc", &config.build)),
+        };
+        config.initial_cargo = match build.cargo {
+            Some(s) => PathBuf::from(s),
+            None => stage0_root.join(exe("cargo", &config.build)),
+        };
+
         // compat with `./configure` while we're still using that
         if fs::metadata("config.mk").is_ok() {
             config.update_with_config_mk();
         }
 
-        return config
+        config
     }
 
     /// "Temporary" routine to parse `config.mk` into this configuration.
@@ -609,8 +624,8 @@ impl Config {
                 }
                 "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
                     let path = parse_configure_path(value);
-                    self.rustc = Some(push_exe_path(path.clone(), &["bin", "rustc"]));
-                    self.cargo = Some(push_exe_path(path, &["bin", "cargo"]));
+                    self.initial_rustc = push_exe_path(path.clone(), &["bin", "rustc"]);
+                    self.initial_cargo = push_exe_path(path, &["bin", "cargo"]);
                 }
                 "CFG_PYTHON" if value.len() > 0 => {
                     let path = parse_configure_path(value);

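Note: the config hunks above fold the old Option<PathBuf> rustc/cargo fields into always-present initial_rustc / initial_cargo paths: an explicit override wins, otherwise the stage0 download under build/<triple>/stage0/bin is assumed. A sketch of that fallback; the triple and the absence of an override are illustrative:

    use std::path::{Path, PathBuf};

    fn exe(name: &str, triple: &str) -> String {
        // Windows binaries need an .exe suffix.
        if triple.contains("windows") { format!("{}.exe", name) } else { name.to_string() }
    }

    fn initial_rustc(overridden: Option<&str>, out: &Path, triple: &str) -> PathBuf {
        match overridden {
            Some(s) => PathBuf::from(s),
            None => out.join(triple).join("stage0/bin").join(exe("rustc", triple)),
        }
    }

    fn main() {
        let rustc = initial_rustc(None, Path::new("build"), "x86_64-unknown-linux-gnu");
        // prints build/x86_64-unknown-linux-gnu/stage0/bin/rustc
        println!("{}", rustc.display());
    }
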
@ -50,7 +50,7 @@ pub fn tmpdir(build: &Build) -> PathBuf {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rust_installer(build: &Build) -> Command {
|
fn rust_installer(build: &Build) -> Command {
|
||||||
build.tool_cmd(&Compiler::new(0, &build.config.build), "rust-installer")
|
build.tool_cmd(&Compiler::new(0, &build.build), "rust-installer")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Builds the `rust-docs` installer component.
|
/// Builds the `rust-docs` installer component.
|
||||||
|
@ -89,7 +89,7 @@ pub fn docs(build: &Build, stage: u32, host: &str) {
|
||||||
|
|
||||||
// As part of this step, *also* copy the docs directory to a directory which
|
// As part of this step, *also* copy the docs directory to a directory which
|
||||||
// buildbot typically uploads.
|
// buildbot typically uploads.
|
||||||
if host == build.config.build {
|
if host == build.build {
|
||||||
let dst = distdir(build).join("doc").join(build.rust_package_vers());
|
let dst = distdir(build).join("doc").join(build.rust_package_vers());
|
||||||
t!(fs::create_dir_all(&dst));
|
t!(fs::create_dir_all(&dst));
|
||||||
cp_r(&src, &dst);
|
cp_r(&src, &dst);
|
||||||
|
@ -97,7 +97,7 @@ pub fn docs(build: &Build, stage: u32, host: &str) {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn find_files(files: &[&str], path: &[PathBuf]) -> Vec<PathBuf> {
|
fn find_files(files: &[&str], path: &[PathBuf]) -> Vec<PathBuf> {
|
||||||
let mut found = Vec::new();
|
let mut found = Vec::with_capacity(files.len());
|
||||||
|
|
||||||
for file in files {
|
for file in files {
|
||||||
let file_path =
|
let file_path =
|
||||||
|
@ -119,17 +119,9 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
|
||||||
//Ask gcc where it keeps its stuff
|
//Ask gcc where it keeps its stuff
|
||||||
let mut cmd = Command::new(build.cc(target_triple));
|
let mut cmd = Command::new(build.cc(target_triple));
|
||||||
cmd.arg("-print-search-dirs");
|
cmd.arg("-print-search-dirs");
|
||||||
build.run_quiet(&mut cmd);
|
let gcc_out = output(&mut cmd);
|
||||||
let gcc_out =
|
|
||||||
String::from_utf8(
|
|
||||||
cmd
|
|
||||||
.output()
|
|
||||||
.expect("failed to execute gcc")
|
|
||||||
.stdout).expect("gcc.exe output was not utf8");
|
|
||||||
|
|
||||||
let mut bin_path: Vec<_> =
|
let mut bin_path: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect();
|
||||||
env::split_paths(&env::var_os("PATH").unwrap_or_default())
|
|
||||||
.collect();
|
|
||||||
let mut lib_path = Vec::new();
|
let mut lib_path = Vec::new();
|
||||||
|
|
||||||
for line in gcc_out.lines() {
|
for line in gcc_out.lines() {
|
||||||
|
@ -140,7 +132,7 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
|
||||||
line[(idx + 1)..]
|
line[(idx + 1)..]
|
||||||
.trim_left_matches(trim_chars)
|
.trim_left_matches(trim_chars)
|
||||||
.split(';')
|
.split(';')
|
||||||
.map(|s| PathBuf::from(s));
|
.map(PathBuf::from);
|
||||||
|
|
||||||
if key == "programs" {
|
if key == "programs" {
|
||||||
bin_path.extend(value);
|
bin_path.extend(value);
|
||||||
|
@ -149,7 +141,7 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let target_tools = vec!["gcc.exe", "ld.exe", "ar.exe", "dlltool.exe", "libwinpthread-1.dll"];
|
let target_tools = ["gcc.exe", "ld.exe", "ar.exe", "dlltool.exe", "libwinpthread-1.dll"];
|
||||||
let mut rustc_dlls = vec!["libstdc++-6.dll", "libwinpthread-1.dll"];
|
let mut rustc_dlls = vec!["libstdc++-6.dll", "libwinpthread-1.dll"];
|
||||||
if target_triple.starts_with("i686-") {
|
if target_triple.starts_with("i686-") {
|
||||||
rustc_dlls.push("libgcc_s_dw2-1.dll");
|
rustc_dlls.push("libgcc_s_dw2-1.dll");
|
||||||
|
@ -157,7 +149,7 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
|
||||||
rustc_dlls.push("libgcc_s_seh-1.dll");
|
rustc_dlls.push("libgcc_s_seh-1.dll");
|
||||||
}
|
}
|
||||||
|
|
||||||
let target_libs = vec![ //MinGW libs
|
let target_libs = [ //MinGW libs
|
||||||
"libgcc.a",
|
"libgcc.a",
|
||||||
"libgcc_eh.a",
|
"libgcc_eh.a",
|
||||||
"libgcc_s.a",
|
"libgcc_s.a",
|
||||||
|
@ -203,7 +195,7 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
|
||||||
let target_libs = find_files(&target_libs, &lib_path);
|
let target_libs = find_files(&target_libs, &lib_path);
|
||||||
|
|
||||||
fn copy_to_folder(src: &Path, dest_folder: &Path) {
|
fn copy_to_folder(src: &Path, dest_folder: &Path) {
|
||||||
let file_name = src.file_name().unwrap().to_os_string();
|
let file_name = src.file_name().unwrap();
|
||||||
let dest = dest_folder.join(file_name);
|
let dest = dest_folder.join(file_name);
|
||||||
copy(src, &dest);
|
copy(src, &dest);
|
||||||
}
|
}
|
||||||
|
@ -234,8 +226,6 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
|
||||||
///
|
///
|
||||||
/// This contains all the bits and pieces to run the MinGW Windows targets
|
/// This contains all the bits and pieces to run the MinGW Windows targets
|
||||||
/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
|
/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
|
||||||
/// Currently just shells out to a python script, but that should be rewritten
|
|
||||||
/// in Rust.
|
|
||||||
pub fn mingw(build: &Build, host: &str) {
|
pub fn mingw(build: &Build, host: &str) {
|
||||||
println!("Dist mingw ({})", host);
|
println!("Dist mingw ({})", host);
|
||||||
let name = pkgname(build, "rust-mingw");
|
let name = pkgname(build, "rust-mingw");
|
||||||
|
@ -366,9 +356,9 @@ pub fn rustc(build: &Build, stage: u32, host: &str) {
|
||||||
pub fn debugger_scripts(build: &Build,
|
pub fn debugger_scripts(build: &Build,
|
||||||
sysroot: &Path,
|
sysroot: &Path,
|
||||||
host: &str) {
|
host: &str) {
|
||||||
|
let dst = sysroot.join("lib/rustlib/etc");
|
||||||
|
t!(fs::create_dir_all(&dst));
|
||||||
let cp_debugger_script = |file: &str| {
|
let cp_debugger_script = |file: &str| {
|
||||||
let dst = sysroot.join("lib/rustlib/etc");
|
|
||||||
t!(fs::create_dir_all(&dst));
|
|
||||||
install(&build.src.join("src/etc/").join(file), &dst, 0o644);
|
install(&build.src.join("src/etc/").join(file), &dst, 0o644);
|
||||||
};
|
};
|
||||||
if host.contains("windows-msvc") {
|
if host.contains("windows-msvc") {
|
||||||
|
@ -404,7 +394,7 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) {
|
||||||
|
|
||||||
// The only true set of target libraries came from the build triple, so
|
// The only true set of target libraries came from the build triple, so
|
||||||
// let's reduce redundant work by only producing archives from that host.
|
// let's reduce redundant work by only producing archives from that host.
|
||||||
if compiler.host != build.config.build {
|
if compiler.host != build.build {
|
||||||
println!("\tskipping, not a build host");
|
println!("\tskipping, not a build host");
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -450,7 +440,7 @@ pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
|
||||||
assert!(build.config.extended);
|
assert!(build.config.extended);
|
||||||
println!("Dist analysis");
|
println!("Dist analysis");
|
||||||
|
|
||||||
if compiler.host != build.config.build {
|
if compiler.host != build.build {
|
||||||
println!("\tskipping, not a build host");
|
println!("\tskipping, not a build host");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
@ -498,12 +488,11 @@ fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_di
|
||||||
if spath.ends_with("~") || spath.ends_with(".pyc") {
|
if spath.ends_with("~") || spath.ends_with(".pyc") {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if spath.contains("llvm/test") || spath.contains("llvm\\test") {
|
if (spath.contains("llvm/test") || spath.contains("llvm\\test")) &&
|
||||||
if spath.ends_with(".ll") ||
|
(spath.ends_with(".ll") ||
|
||||||
spath.ends_with(".td") ||
|
spath.ends_with(".td") ||
|
||||||
spath.ends_with(".s") {
|
spath.ends_with(".s")) {
|
||||||
return false
|
return false
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let full_path = Path::new(dir).join(path);
|
let full_path = Path::new(dir).join(path);
|
||||||
|
@ -595,7 +584,7 @@ pub fn rust_src(build: &Build) {
|
||||||
t!(fs::remove_dir_all(&image));
|
t!(fs::remove_dir_all(&image));
|
||||||
}
|
}
|
||||||
|
|
||||||
const CARGO_VENDOR_VERSION: &'static str = "0.1.4";
|
const CARGO_VENDOR_VERSION: &str = "0.1.4";
|
||||||
|
|
||||||
/// Creates the plain source tarball
|
/// Creates the plain source tarball
|
||||||
pub fn plain_source_tarball(build: &Build) {
|
pub fn plain_source_tarball(build: &Build) {
|
||||||
|
@ -634,26 +623,26 @@ pub fn plain_source_tarball(build: &Build) {
|
||||||
write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
|
 write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());

 // If we're building from git sources, we need to vendor a complete distribution.
-if build.src_is_git {
+if build.rust_info.is_git() {
     // Get cargo-vendor installed, if it isn't already.
     let mut has_cargo_vendor = false;
-    let mut cmd = Command::new(&build.cargo);
+    let mut cmd = Command::new(&build.initial_cargo);
     for line in output(cmd.arg("install").arg("--list")).lines() {
         has_cargo_vendor |= line.starts_with("cargo-vendor ");
     }
     if !has_cargo_vendor {
-        let mut cmd = Command::new(&build.cargo);
+        let mut cmd = Command::new(&build.initial_cargo);
         cmd.arg("install")
            .arg("--force")
            .arg("--debug")
            .arg("--vers").arg(CARGO_VENDOR_VERSION)
            .arg("cargo-vendor")
-           .env("RUSTC", &build.rustc);
+           .env("RUSTC", &build.initial_rustc);
         build.run(&mut cmd);
     }

     // Vendor all Cargo dependencies
-    let mut cmd = Command::new(&build.cargo);
+    let mut cmd = Command::new(&build.initial_cargo);
     cmd.arg("vendor")
        .current_dir(&plain_dst_src.join("src"));
     build.run(&mut cmd);

@@ -716,7 +705,7 @@ fn write_file(path: &Path, data: &[u8]) {

 pub fn cargo(build: &Build, stage: u32, target: &str) {
     println!("Dist cargo stage{} ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);

     let src = build.src.join("src/tools/cargo");
     let etc = src.join("src/etc");

@@ -777,7 +766,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
 pub fn rls(build: &Build, stage: u32, target: &str) {
     assert!(build.config.extended);
     println!("Dist RLS stage{} ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);

     let src = build.src.join("src/tools/rls");
     let release_num = build.release_num("rls");

@@ -1209,7 +1198,7 @@ fn add_env(build: &Build, cmd: &mut Command, target: &str) {
 }

 pub fn hash_and_sign(build: &Build) {
-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);
     let mut cmd = build.tool_cmd(&compiler, "build-manifest");
     let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
         panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
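The vendoring logic above only installs cargo-vendor when `cargo install --list` does not already report it. A minimal, self-contained sketch of that detection step (the plain "cargo" path stands in for `build.initial_cargo`; this is illustrative, not the bootstrap code itself):

use std::process::Command;

/// Returns true if `cargo install --list` reports a line for `cargo-vendor`.
fn has_cargo_vendor(cargo_path: &str) -> bool {
    let output = Command::new(cargo_path)
        .arg("install")
        .arg("--list")
        .output()
        .expect("failed to run cargo");
    // Installed packages are listed one per line, e.g. "cargo-vendor v0.1.4:".
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .any(|line| line.starts_with("cargo-vendor "))
}

fn main() {
    if !has_cargo_vendor("cargo") {
        println!("cargo-vendor not installed; a real build would `cargo install` it");
    }
}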
@@ -45,7 +45,7 @@ pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) {
     t!(fs::create_dir_all(&out));

     let out = out.join(name);
-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);
     let src = src.join(name);
     let index = out.join("index.html");
     let rustbook = build.tool(&compiler, "rustbook");

@@ -95,7 +95,7 @@ pub fn book(build: &Build, target: &str, name: &str) {
 fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
     let out = build.doc_out(target);

-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);

     let path = build.src.join("src/doc").join(markdown);

@@ -150,7 +150,7 @@ pub fn standalone(build: &Build, target: &str) {
     let out = build.doc_out(target);
     t!(fs::create_dir_all(&out));

-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);

     let favicon = build.src.join("src/doc/favicon.inc");
     let footer = build.src.join("src/doc/footer.inc");

@@ -217,7 +217,7 @@ pub fn std(build: &Build, stage: u32, target: &str) {
     println!("Documenting stage{} std ({})", stage, target);
     let out = build.doc_out(target);
     t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);
     let compiler = if build.force_use_stage1(&compiler, target) {
         Compiler::new(1, compiler.host)
     } else {

@@ -276,7 +276,7 @@ pub fn test(build: &Build, stage: u32, target: &str) {
     println!("Documenting stage{} test ({})", stage, target);
     let out = build.doc_out(target);
     t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);
     let compiler = if build.force_use_stage1(&compiler, target) {
         Compiler::new(1, compiler.host)
     } else {

@@ -306,7 +306,7 @@ pub fn rustc(build: &Build, stage: u32, target: &str) {
     println!("Documenting stage{} compiler ({})", stage, target);
     let out = build.doc_out(target);
     t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.config.build);
+    let compiler = Compiler::new(stage, &build.build);
     let compiler = if build.force_use_stage1(&compiler, target) {
         Compiler::new(1, compiler.host)
     } else {

@@ -351,13 +351,13 @@ pub fn error_index(build: &Build, target: &str) {
     println!("Documenting error index ({})", target);
     let out = build.doc_out(target);
     t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);
     let mut index = build.tool_cmd(&compiler, "error_index_generator");
     index.arg("html");
     index.arg(out.join("error-index.html"));

     // FIXME: shouldn't have to pass this env var
-    index.env("CFG_BUILD", &build.config.build);
+    index.env("CFG_BUILD", &build.build);

     build.run(&mut index);
 }

@@ -367,7 +367,7 @@ pub fn unstable_book_gen(build: &Build, target: &str) {
     let out = build.md_doc_out(target).join("unstable-book");
     t!(fs::create_dir_all(&out));
     t!(fs::remove_dir_all(&out));
-    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = Compiler::new(0, &build.build);
     let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen");
     cmd.arg(build.src.join("src"));
     cmd.arg(out);
@@ -35,22 +35,12 @@ pub struct Flags {
     pub host: Vec<String>,
     pub target: Vec<String>,
     pub config: Option<PathBuf>,
-    pub src: Option<PathBuf>,
+    pub src: PathBuf,
     pub jobs: Option<u32>,
     pub cmd: Subcommand,
     pub incremental: bool,
 }

-impl Flags {
-    pub fn verbose(&self) -> bool {
-        self.verbose > 0
-    }
-
-    pub fn very_verbose(&self) -> bool {
-        self.verbose > 1
-    }
-}
-
 pub enum Subcommand {
     Build {
         paths: Vec<PathBuf>,

@@ -61,7 +51,7 @@ pub enum Subcommand {
     Test {
         paths: Vec<PathBuf>,
         test_args: Vec<String>,
-        no_fail_fast: bool,
+        fail_fast: bool,
     },
     Bench {
         paths: Vec<PathBuf>,

@@ -122,16 +112,15 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`");
     // the subcommand. Therefore we must manually identify the subcommand first, so that we can
     // complete the definition of the options. Then we can use the getopt::Matches object from
     // there on out.
-    let mut possible_subcommands = args.iter().collect::<Vec<_>>();
-    possible_subcommands.retain(|&s|
-        (s == "build")
-        || (s == "test")
-        || (s == "bench")
-        || (s == "doc")
-        || (s == "clean")
-        || (s == "dist")
-        || (s == "install"));
-    let subcommand = match possible_subcommands.first() {
+    let subcommand = args.iter().find(|&s|
+        (s == "build")
+        || (s == "test")
+        || (s == "bench")
+        || (s == "doc")
+        || (s == "clean")
+        || (s == "dist")
+        || (s == "install"));
+    let subcommand = match subcommand {
         Some(s) => s,
         None => {
             // No subcommand -- show the general usage and subcommand help

@@ -164,7 +153,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`");
     let mut pass_sanity_check = true;
     match matches.free.get(0) {
         Some(check_subcommand) => {
-            if &check_subcommand != subcommand {
+            if check_subcommand != subcommand {
                 pass_sanity_check = false;
             }
         },

@@ -279,7 +268,7 @@ Arguments:
             Subcommand::Test {
                 paths: paths,
                 test_args: matches.opt_strs("test-args"),
-                no_fail_fast: matches.opt_present("no-fail-fast"),
+                fail_fast: !matches.opt_present("no-fail-fast"),
             }
         }
         "bench" => {

@@ -316,12 +305,15 @@ Arguments:

     let mut stage = matches.opt_str("stage").map(|j| j.parse().unwrap());

-    if matches.opt_present("incremental") {
-        if stage.is_none() {
-            stage = Some(1);
-        }
+    if matches.opt_present("incremental") && stage.is_none() {
+        stage = Some(1);
     }

+    let cwd = t!(env::current_dir());
+    let src = matches.opt_str("src").map(PathBuf::from)
+        .or_else(|| env::var_os("SRC").map(PathBuf::from))
+        .unwrap_or(cwd);

     Flags {
         verbose: matches.opt_count("verbose"),
         stage: stage,

@@ -333,7 +325,7 @@ Arguments:
         host: split(matches.opt_strs("host")),
         target: split(matches.opt_strs("target")),
         config: cfg_file,
-        src: matches.opt_str("src").map(PathBuf::from),
+        src: src,
         jobs: matches.opt_str("jobs").map(|j| j.parse().unwrap()),
         cmd: cmd,
         incremental: matches.opt_present("incremental"),

@@ -352,9 +344,9 @@ impl Subcommand {
         }
     }

-    pub fn no_fail_fast(&self) -> bool {
+    pub fn fail_fast(&self) -> bool {
         match *self {
-            Subcommand::Test { no_fail_fast, .. } => no_fail_fast,
+            Subcommand::Test { fail_fast, .. } => fail_fast,
             _ => false,
         }
     }
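Users still pass `--no-fail-fast` on the command line; only the stored field flips polarity so downstream code can ask the positive question. A small sketch of the same pattern under that assumption (the plain boolean below stands in for matches.opt_present("no-fail-fast"), and the trimmed-down enum is illustrative only):

/// Mirrors the Subcommand::Test change: store the positive `fail_fast`
/// rather than the negative `no_fail_fast`.
enum Subcommand {
    Test { fail_fast: bool },
    Build,
}

impl Subcommand {
    /// Non-test subcommands never fail fast, matching the `_ => false` arm.
    fn fail_fast(&self) -> bool {
        match *self {
            Subcommand::Test { fail_fast } => fail_fast,
            _ => false,
        }
    }
}

fn main() {
    // `no_fail_fast_flag` plays the role of the parsed --no-fail-fast option.
    let no_fail_fast_flag = false;
    let cmd = Subcommand::Test { fail_fast: !no_fail_fast_flag };
    assert!(cmd.fail_fast());
}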
@@ -146,5 +146,5 @@ fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {
             _ => {}
         }
     }
-    return ret
+    ret
 }
@@ -161,25 +161,35 @@ pub struct Build {
     flags: Flags,

     // Derived properties from the above two configurations
-    cargo: PathBuf,
-    rustc: PathBuf,
     src: PathBuf,
     out: PathBuf,
     rust_info: channel::GitInfo,
     cargo_info: channel::GitInfo,
     rls_info: channel::GitInfo,
     local_rebuild: bool,
+    fail_fast: bool,
+    verbosity: usize,
+
+    // Targets for which to build.
+    build: String,
+    hosts: Vec<String>,
+    targets: Vec<String>,
+
+    // Stage 0 (downloaded) compiler and cargo or their local rust equivalents.
+    initial_rustc: PathBuf,
+    initial_cargo: PathBuf,

     // Probed tools at runtime
     lldb_version: Option<String>,
     lldb_python_dir: Option<String>,

     // Runtime state filled in later on
+    // target -> (cc, ar)
     cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
+    // host -> (cc, ar)
     cxx: HashMap<String, gcc::Tool>,
     crates: HashMap<String, Crate>,
     is_sudo: bool,
-    src_is_git: bool,
     ci_env: CiEnv,
     delayed_failures: Cell<usize>,
 }

@@ -202,20 +212,16 @@ struct Crate {
 /// build system, with each mod generating output in a different directory.
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub enum Mode {
-    /// This cargo is going to build the standard library, placing output in the
-    /// "stageN-std" directory.
+    /// Build the standard library, placing output in the "stageN-std" directory.
     Libstd,

-    /// This cargo is going to build libtest, placing output in the
-    /// "stageN-test" directory.
+    /// Build libtest, placing output in the "stageN-test" directory.
     Libtest,

-    /// This cargo is going to build librustc and compiler libraries, placing
-    /// output in the "stageN-rustc" directory.
+    /// Build librustc and compiler libraries, placing output in the "stageN-rustc" directory.
     Librustc,

-    /// This cargo is going to build some tool, placing output in the
-    /// "stageN-tools" directory.
+    /// Build some tool, placing output in the "stageN-tools" directory.
     Tool,
 }

@@ -226,22 +232,9 @@ impl Build {
     /// By default all build output will be placed in the current directory.
     pub fn new(flags: Flags, config: Config) -> Build {
         let cwd = t!(env::current_dir());
-        let src = flags.src.clone().or_else(|| {
-            env::var_os("SRC").map(|x| x.into())
-        }).unwrap_or(cwd.clone());
+        let src = flags.src.clone();
         let out = cwd.join("build");

-        let stage0_root = out.join(&config.build).join("stage0/bin");
-        let rustc = match config.rustc {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("rustc", &config.build)),
-        };
-        let cargo = match config.cargo {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("cargo", &config.build)),
-        };
-        let local_rebuild = config.local_rebuild;

         let is_sudo = match env::var_os("SUDO_USER") {
             Some(sudo_user) => {
                 match env::var_os("USER") {

@@ -254,32 +247,64 @@ impl Build {
         let rust_info = channel::GitInfo::new(&src);
         let cargo_info = channel::GitInfo::new(&src.join("src/tools/cargo"));
         let rls_info = channel::GitInfo::new(&src.join("src/tools/rls"));
-        let src_is_git = src.join(".git").exists();
+        let hosts = if !flags.host.is_empty() {
+            for host in flags.host.iter() {
+                if !config.host.contains(host) {
+                    panic!("specified host `{}` is not in configuration", host);
+                }
+            }
+            flags.host.clone()
+        } else {
+            config.host.clone()
+        };
+        let targets = if !flags.target.is_empty() {
+            for target in flags.target.iter() {
+                if !config.target.contains(target) {
+                    panic!("specified target `{}` is not in configuration", target);
+                }
+            }
+            flags.target.clone()
+        } else {
+            config.target.clone()
+        };

         Build {
+            initial_rustc: config.initial_rustc.clone(),
+            initial_cargo: config.initial_cargo.clone(),
+            local_rebuild: config.local_rebuild,
+            fail_fast: flags.cmd.fail_fast(),
+            verbosity: cmp::max(flags.verbose, config.verbose),
+
+            build: config.host[0].clone(),
+            hosts: hosts,
+            targets: targets,
+
             flags: flags,
             config: config,
-            cargo: cargo,
-            rustc: rustc,
             src: src,
             out: out,

             rust_info: rust_info,
             cargo_info: cargo_info,
             rls_info: rls_info,
-            local_rebuild: local_rebuild,
             cc: HashMap::new(),
             cxx: HashMap::new(),
             crates: HashMap::new(),
             lldb_version: None,
             lldb_python_dir: None,
             is_sudo: is_sudo,
-            src_is_git: src_is_git,
             ci_env: CiEnv::current(),
             delayed_failures: Cell::new(0),
         }
     }

+    fn build_slice(&self) -> &[String] {
+        unsafe {
+            std::slice::from_raw_parts(&self.build, 1)
+        }
+    }
+
     /// Executes the entire build, as configured by the flags and configuration.
     pub fn build(&mut self) {
         unsafe {
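The new build_slice helper manufactures a one-element &[String] from the single build triple so call sites that used to borrow `&config.host[..1]` still receive a slice. A self-contained sketch of why the `from_raw_parts` call is sound (one valid element, length 1); the comparison against std::slice::from_ref is my own illustration and not part of this change:

struct Build {
    build: String,
}

impl Build {
    /// The form used in the diff: a reference to one String is a valid
    /// base pointer for a slice of length 1.
    fn build_slice(&self) -> &[String] {
        unsafe { std::slice::from_raw_parts(&self.build, 1) }
    }

    /// Safe equivalent on compilers that provide std::slice::from_ref.
    fn build_slice_safe(&self) -> &[String] {
        std::slice::from_ref(&self.build)
    }
}

fn main() {
    let b = Build { build: "x86_64-unknown-linux-gnu".to_string() };
    assert_eq!(b.build_slice(), b.build_slice_safe());
    assert_eq!(b.build_slice().len(), 1);
}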
@@ -296,7 +321,7 @@ impl Build {
         sanity::check(self);
         // If local-rust is the same major.minor as the current version, then force a local-rebuild
         let local_version_verbose = output(
-            Command::new(&self.rustc).arg("--version").arg("--verbose"));
+            Command::new(&self.initial_rustc).arg("--version").arg("--verbose"));
         let local_release = local_version_verbose
             .lines().filter(|x| x.starts_with("release:"))
             .next().unwrap().trim_left_matches("release:").trim();

@@ -338,7 +363,7 @@ impl Build {
              mode: Mode,
              target: &str,
              cmd: &str) -> Command {
-        let mut cargo = Command::new(&self.cargo);
+        let mut cargo = Command::new(&self.initial_cargo);
         let out_dir = self.stage_out(compiler, mode);
         cargo.env("CARGO_TARGET_DIR", out_dir)
              .arg(cmd)

@@ -347,7 +372,7 @@ impl Build {

         // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
         // Force cargo to output binaries with disambiguating hashes in the name
-        cargo.env("__CARGO_DEFAULT_LIB_METADATA", "1");
+        cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel);

         let stage;
         if compiler.stage == 0 && self.local_rebuild {

@@ -422,7 +447,7 @@ impl Build {
         // library up and running, so we can use the normal compiler to compile
         // build scripts in that situation.
         if mode == Mode::Libstd {
-            cargo.env("RUSTC_SNAPSHOT", &self.rustc)
+            cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc)
                  .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
         } else {
             cargo.env("RUSTC_SNAPSHOT", self.compiler_path(compiler))

@@ -441,8 +466,7 @@ impl Build {
             cargo.env("RUSTC_ON_FAIL", on_fail);
         }

-        let verbose = cmp::max(self.config.verbose, self.flags.verbose);
-        cargo.env("RUSTC_VERBOSE", format!("{}", verbose));
+        cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));

         // Specify some various options for build scripts used throughout
         // the build.

@@ -480,7 +504,7 @@ impl Build {
         // FIXME: should update code to not require this env var
         cargo.env("CFG_COMPILER_HOST_TRIPLE", target);

-        if self.config.verbose() || self.flags.verbose() {
+        if self.is_verbose() {
             cargo.arg("-v");
         }
         // FIXME: cargo bench does not accept `--release`

@@ -496,13 +520,13 @@ impl Build {

         self.ci_env.force_coloring_in_ci(&mut cargo);

-        return cargo
+        cargo
     }

     /// Get a path to the compiler specified.
     fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
         if compiler.is_snapshot(self) {
-            self.rustc.clone()
+            self.initial_rustc.clone()
         } else {
             self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
         }

@@ -519,7 +543,7 @@ impl Build {
         let mut rustdoc = self.compiler_path(compiler);
         rustdoc.pop();
         rustdoc.push(exe("rustdoc", compiler.host));
-        return rustdoc
+        rustdoc
     }

     /// Get a `Command` which is ready to run `tool` in `stage` built for

@@ -527,7 +551,7 @@ impl Build {
     fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
         let mut cmd = Command::new(self.tool(&compiler, tool));
         self.prepare_tool_cmd(compiler, &mut cmd);
-        return cmd
+        cmd
     }

     /// Prepares the `cmd` provided to be able to run the `compiler` provided.

@@ -578,7 +602,7 @@ impl Build {
         if self.config.profiler {
             features.push_str(" profiler");
         }
-        return features
+        features
     }

     /// Get the space-separated set of activated features for the compiler.

@@ -587,7 +611,7 @@ impl Build {
         if self.config.use_jemalloc {
             features.push_str(" jemalloc");
         }
-        return features
+        features
     }

     /// Component directory that Cargo will produce output into (e.g.

@@ -760,7 +784,7 @@ impl Build {

     /// Returns the libdir of the snapshot compiler.
     fn rustc_snapshot_libdir(&self) -> PathBuf {
-        self.rustc.parent().unwrap().parent().unwrap()
+        self.initial_rustc.parent().unwrap().parent().unwrap()
             .join(libdir(&self.config.build))
     }

@@ -792,9 +816,17 @@ impl Build {
         try_run_suppressed(cmd)
     }

+    pub fn is_verbose(&self) -> bool {
+        self.verbosity > 0
+    }
+
+    pub fn is_very_verbose(&self) -> bool {
+        self.verbosity > 1
+    }
+
     /// Prints a message if this build is configured in verbose mode.
     fn verbose(&self, msg: &str) {
-        if self.flags.verbose() || self.config.verbose() {
+        if self.is_verbose() {
             println!("{}", msg);
         }
     }

@@ -802,7 +834,7 @@ impl Build {
     /// Returns the number of parallel jobs that have been configured for this
     /// build.
     fn jobs(&self) -> u32 {
-        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
+        self.flags.jobs.unwrap_or_else(|| num_cpus::get() as u32)
     }

     /// Returns the path to the C compiler for the target specified.

@@ -834,7 +866,7 @@ impl Build {
         if target == "i686-pc-windows-gnu" {
             base.push("-fno-omit-frame-pointer".into());
         }
-        return base
+        base
     }

     /// Returns the path to the `ar` archive utility for the target specified.

@@ -866,7 +898,7 @@ impl Build {
            !target.contains("emscripten") {
             base.push(format!("-Clinker={}", self.cc(target).display()));
         }
-        return base
+        base
     }

     /// Returns the "musl root" for this `target`, if defined

@@ -1047,7 +1079,7 @@ impl<'a> Compiler<'a> {

     /// Returns whether this is a snapshot compiler for `build`'s configuration
     fn is_snapshot(&self, build: &Build) -> bool {
-        self.stage == 0 && self.host == build.config.build
+        self.stage == 0 && self.host == build.build
     }

     /// Returns if this compiler should be treated as a final stage one in the
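Command-line and config-file verbosity are now merged once into a single counter that the new accessors read. A tiny sketch of that merge; only the field and method names are taken from the change, the rest is illustrative:

use std::cmp;

struct Build {
    verbosity: usize,
}

impl Build {
    fn new(flag_verbose: usize, config_verbose: usize) -> Build {
        // Take whichever source asked for more output.
        Build { verbosity: cmp::max(flag_verbose, config_verbose) }
    }

    fn is_verbose(&self) -> bool { self.verbosity > 0 }
    fn is_very_verbose(&self) -> bool { self.verbosity > 1 }
}

fn main() {
    let b = Build::new(0, 2); // e.g. config.toml asks for verbose = 2
    assert!(b.is_verbose() && b.is_very_verbose());
}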
@@ -56,7 +56,7 @@ fn build_krate(build: &mut Build, krate: &str) {
     // of packages we're going to have to know what `-p` arguments to pass it
     // to know what crates to test. Here we run `cargo metadata` to learn about
     // the dependency graph and what `-p` arguments there are.
-    let mut cargo = Command::new(&build.cargo);
+    let mut cargo = Command::new(&build.initial_cargo);
     cargo.arg("metadata")
          .arg("--format-version").arg("1")
          .arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml"));
@@ -94,7 +94,7 @@ pub fn llvm(build: &Build, target: &str) {
     let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};

     cfg.target(target)
-       .host(&build.config.build)
+       .host(&build.build)
        .out_dir(&out_dir)
        .profile(profile)
        .define("LLVM_ENABLE_ASSERTIONS", assertions)

@@ -129,11 +129,11 @@ pub fn llvm(build: &Build, target: &str) {
     }

     // http://llvm.org/docs/HowToCrossCompileLLVM.html
-    if target != build.config.build {
+    if target != build.build {
         // FIXME: if the llvm root for the build triple is overridden then we
         // should use llvm-tblgen from there, also should verify that it
         // actually exists most of the time in normal installs of LLVM.
-        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
+        let host = build.llvm_out(&build.build).join("bin/llvm-tblgen");
         cfg.define("CMAKE_CROSSCOMPILING", "True")
            .define("LLVM_TABLEGEN", &host);
     }

@@ -243,7 +243,7 @@ pub fn test_helpers(build: &Build, target: &str) {
     cfg.cargo_metadata(false)
        .out_dir(&dst)
        .target(target)
-       .host(&build.config.build)
+       .host(&build.build)
        .opt_level(0)
        .debug(false)
        .file(build.src.join("src/rt/rust_test_helpers.c"))
@@ -18,9 +18,9 @@
 //! In theory if we get past this phase it's a bug if a build fails, but in
 //! practice that's likely not true!

-use std::collections::HashSet;
+use std::collections::HashMap;
 use std::env;
-use std::ffi::{OsStr, OsString};
+use std::ffi::{OsString, OsStr};
 use std::fs;
 use std::process::Command;
 use std::path::PathBuf;

@@ -29,45 +29,59 @@ use build_helper::output;

 use Build;

+struct Finder {
+    cache: HashMap<OsString, Option<PathBuf>>,
+    path: OsString,
+}
+
+impl Finder {
+    fn new() -> Self {
+        Self {
+            cache: HashMap::new(),
+            path: env::var_os("PATH").unwrap_or_default()
+        }
+    }
+
+    fn maybe_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> Option<PathBuf> {
+        let cmd: OsString = cmd.as_ref().into();
+        let path = self.path.clone();
+        self.cache.entry(cmd.clone()).or_insert_with(|| {
+            for path in env::split_paths(&path) {
+                let target = path.join(&cmd);
+                let mut cmd_alt = cmd.clone();
+                cmd_alt.push(".exe");
+                if target.is_file() || // some/path/git
+                   target.with_extension("exe").exists() || // some/path/git.exe
+                   target.join(&cmd_alt).exists() { // some/path/git/git.exe
+                    return Some(target);
+                }
+            }
+            None
+        }).clone()
+    }
+
+    fn must_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> PathBuf {
+        self.maybe_have(&cmd).unwrap_or_else(|| {
+            panic!("\n\ncouldn't find required command: {:?}\n\n", cmd.as_ref());
+        })
+    }
+}
+
 pub fn check(build: &mut Build) {
-    let mut checked = HashSet::new();
-    let path = env::var_os("PATH").unwrap_or(OsString::new());
+    let path = env::var_os("PATH").unwrap_or_default();
     // On Windows, quotes are invalid characters for filename paths, and if
     // one is present as part of the PATH then that can lead to the system
     // being unable to identify the files properly. See
     // https://github.com/rust-lang/rust/issues/34959 for more details.
-    if cfg!(windows) {
-        if path.to_string_lossy().contains("\"") {
-            panic!("PATH contains invalid character '\"'");
-        }
+    if cfg!(windows) && path.to_string_lossy().contains("\"") {
+        panic!("PATH contains invalid character '\"'");
     }
-    let have_cmd = |cmd: &OsStr| {
-        for path in env::split_paths(&path) {
-            let target = path.join(cmd);
-            let mut cmd_alt = cmd.to_os_string();
-            cmd_alt.push(".exe");
-            if target.is_file() ||
-               target.with_extension("exe").exists() ||
-               target.join(cmd_alt).exists() {
-                return Some(target);
-            }
-        }
-        return None;
-    };
-
-    let mut need_cmd = |cmd: &OsStr| {
-        if !checked.insert(cmd.to_owned()) {
-            return
-        }
-        if have_cmd(cmd).is_none() {
-            panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
-        }
-    };

+    let mut cmd_finder = Finder::new();
     // If we've got a git directory we're gona need git to update
     // submodules and learn about various other aspects.
-    if build.src_is_git {
-        need_cmd("git".as_ref());
+    if build.rust_info.is_git() {
+        cmd_finder.must_have("git");
     }

     // We need cmake, but only if we're actually building LLVM or sanitizers.

@@ -75,57 +89,32 @@ pub fn check(build: &mut Build) {
         .filter_map(|host| build.config.target_config.get(host))
         .any(|config| config.llvm_config.is_none());
     if building_llvm || build.config.sanitizers {
-        need_cmd("cmake".as_ref());
+        cmd_finder.must_have("cmake");
     }

     // Ninja is currently only used for LLVM itself.
-    if building_llvm && build.config.ninja {
-        // Some Linux distros rename `ninja` to `ninja-build`.
-        // CMake can work with either binary name.
-        if have_cmd("ninja-build".as_ref()).is_none() {
-            need_cmd("ninja".as_ref());
-        }
+    // Some Linux distros rename `ninja` to `ninja-build`.
+    // CMake can work with either binary name.
+    if building_llvm && build.config.ninja && cmd_finder.maybe_have("ninja-build").is_none() {
+        cmd_finder.must_have("ninja");
     }

-    if build.config.python.is_none() {
-        // set by bootstrap.py
-        if let Some(v) = env::var_os("BOOTSTRAP_PYTHON") {
-            build.config.python = Some(PathBuf::from(v));
-        }
-    }
-    if build.config.python.is_none() {
-        build.config.python = have_cmd("python2.7".as_ref());
-    }
-    if build.config.python.is_none() {
-        build.config.python = have_cmd("python2".as_ref());
-    }
-    if build.config.python.is_none() {
-        need_cmd("python".as_ref());
-        build.config.python = Some("python".into());
-    }
-    need_cmd(build.config.python.as_ref().unwrap().as_ref());
+    build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p))
+        .or_else(|| env::var_os("BOOTSTRAP_PYTHON").map(PathBuf::from)) // set by bootstrap.py
+        .or_else(|| cmd_finder.maybe_have("python2.7"))
+        .or_else(|| cmd_finder.maybe_have("python2"))
+        .or_else(|| Some(cmd_finder.must_have("python")));

-    if let Some(ref s) = build.config.nodejs {
-        need_cmd(s.as_ref());
-    } else {
-        // Look for the nodejs command, needed for emscripten testing
-        if let Some(node) = have_cmd("node".as_ref()) {
-            build.config.nodejs = Some(node);
-        } else if let Some(node) = have_cmd("nodejs".as_ref()) {
-            build.config.nodejs = Some(node);
-        }
-    }
+    build.config.nodejs = build.config.nodejs.take().map(|p| cmd_finder.must_have(p))
+        .or_else(|| cmd_finder.maybe_have("node"))
+        .or_else(|| cmd_finder.maybe_have("nodejs"));

-    if let Some(ref gdb) = build.config.gdb {
-        need_cmd(gdb.as_ref());
-    } else {
-        build.config.gdb = have_cmd("gdb".as_ref());
-    }
+    build.config.gdb = build.config.gdb.take().map(|p| cmd_finder.must_have(p))
+        .or_else(|| cmd_finder.maybe_have("gdb"));

     // We're gonna build some custom C code here and there, host triples
     // also build some C++ shims for LLVM so we need a C++ compiler.
-    for target in build.config.target.iter() {
+    for target in &build.config.target {
         // On emscripten we don't actually need the C compiler to just
         // build the target artifacts, only for testing. For the sake
         // of easier bot configuration, just skip detection.

@@ -133,33 +122,32 @@ pub fn check(build: &mut Build) {
             continue;
         }

-        need_cmd(build.cc(target).as_ref());
+        cmd_finder.must_have(build.cc(target));
         if let Some(ar) = build.ar(target) {
-            need_cmd(ar.as_ref());
+            cmd_finder.must_have(ar);
         }
     }
-    for host in build.config.host.iter() {
-        need_cmd(build.cxx(host).unwrap().as_ref());
-    }
-
-    // The msvc hosts don't use jemalloc, turn it off globally to
-    // avoid packaging the dummy liballoc_jemalloc on that platform.
     for host in build.config.host.iter() {
+        cmd_finder.must_have(build.cxx(host).unwrap());
+
+        // The msvc hosts don't use jemalloc, turn it off globally to
+        // avoid packaging the dummy liballoc_jemalloc on that platform.
         if host.contains("msvc") {
             build.config.use_jemalloc = false;
         }
     }

     // Externally configured LLVM requires FileCheck to exist
-    let filecheck = build.llvm_filecheck(&build.config.build);
+    let filecheck = build.llvm_filecheck(&build.build);
     if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {
         panic!("FileCheck executable {:?} does not exist", filecheck);
     }

-    for target in build.config.target.iter() {
+    for target in &build.config.target {
         // Can't compile for iOS unless we're on macOS
         if target.contains("apple-ios") &&
-           !build.config.build.contains("apple-darwin") {
+           !build.build.contains("apple-darwin") {
             panic!("the iOS target is only supported on macOS");
         }

@@ -206,18 +194,6 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
         }
     }

-    for host in build.flags.host.iter() {
-        if !build.config.host.contains(host) {
-            panic!("specified host `{}` is not in the ./configure list", host);
-        }
-    }
-    for target in build.flags.target.iter() {
-        if !build.config.target.contains(target) {
-            panic!("specified target `{}` is not in the ./configure list",
-                   target);
-        }
-    }
-
     let run = |cmd: &mut Command| {
         cmd.output().map(|output| {
             String::from_utf8_lossy(&output.stdout)

@@ -231,6 +207,6 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
     }

     if let Some(ref s) = build.config.ccache {
-        need_cmd(s.as_ref());
+        cmd_finder.must_have(s);
     }
 }
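The Finder introduced above replaces the have_cmd/need_cmd closures with a PATH lookup that caches each answer, so repeated probes for the same tool never re-walk PATH. A simplified, runnable sketch of the same caching idea; it omits the Windows ".exe" handling of the real struct, and the probed tool names are only examples:

use std::collections::HashMap;
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;

/// Each command name is resolved against PATH at most once,
/// then served from the HashMap on later calls.
struct Finder {
    cache: HashMap<OsString, Option<PathBuf>>,
    path: OsString,
}

impl Finder {
    fn new() -> Self {
        Finder { cache: HashMap::new(), path: env::var_os("PATH").unwrap_or_default() }
    }

    fn maybe_have(&mut self, cmd: &str) -> Option<PathBuf> {
        let key: OsString = cmd.into();
        let path = self.path.clone();
        self.cache.entry(key.clone()).or_insert_with(|| {
            // Walk every PATH entry and take the first existing file.
            env::split_paths(&path)
                .map(|dir| dir.join(&key))
                .find(|candidate| candidate.is_file())
        }).clone()
    }

    fn must_have(&mut self, cmd: &str) -> PathBuf {
        self.maybe_have(cmd)
            .unwrap_or_else(|| panic!("couldn't find required command: {:?}", cmd))
    }
}

fn main() {
    let mut finder = Finder::new();
    // Which tools exist depends on the machine; these probes do not panic.
    for tool in &["git", "ninja", "ninja-build"] {
        match finder.maybe_have(tool) {
            Some(p) => println!("{} at {}", tool, p.display()),
            None => println!("{} not found", tool),
        }
    }
    // A hard requirement would instead use finder.must_have("git").
}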
@@ -104,10 +104,10 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.build("llvm", "src/llvm")
          .host(true)
          .dep(move |s| {
-             if s.target == build.config.build {
+             if s.target == build.build {
                  Step::noop()
              } else {
-                 s.target(&build.config.build)
+                 s.target(&build.build)
              }
          })
          .run(move |s| native::llvm(build, s.target));

@@ -124,7 +124,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
                  Step::noop()
              } else {
                  s.name("librustc")
-                  .host(&build.config.build)
+                  .host(&build.build)
                   .stage(s.stage - 1)
              }
          })

@@ -148,7 +148,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
                 }
             }
         }
-        return ret
+        ret
     };

     // ========================================================================

@@ -215,29 +215,29 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
         let mut rule = rules.build(&krate, "path/to/nowhere");
         rule.dep(move |s| {
                 if build.force_use_stage1(&s.compiler(), s.target) {
-                    s.host(&build.config.build).stage(1)
-                } else if s.host == build.config.build {
+                    s.host(&build.build).stage(1)
+                } else if s.host == build.build {
                     s.name(dep)
                 } else {
-                    s.host(&build.config.build)
+                    s.host(&build.build)
                 }
             })
             .run(move |s| {
                 if build.force_use_stage1(&s.compiler(), s.target) {
                     link(build,
-                         &s.stage(1).host(&build.config.build).compiler(),
+                         &s.stage(1).host(&build.build).compiler(),
                          &s.compiler(),
                          s.target)
-                } else if s.host == build.config.build {
+                } else if s.host == build.build {
                     link(build, &s.compiler(), &s.compiler(), s.target)
                 } else {
                     link(build,
-                         &s.host(&build.config.build).compiler(),
+                         &s.host(&build.build).compiler(),
                          &s.compiler(),
                          s.target)
                 }
             });
-        return rule
+        rule
     }

     // Similar to the `libstd`, `libtest`, and `librustc` rules above, except
@@ -269,7 +269,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     for (krate, path, _default) in krates("std") {
         rules.build(&krate.build_step, path)
              .dep(|s| s.name("startup-objects"))
-             .dep(move |s| s.name("rustc").host(&build.config.build).target(s.host))
+             .dep(move |s| s.name("rustc").host(&build.build).target(s.host))
              .run(move |s| compile::std(build, s.target, &s.compiler()));
     }
     for (krate, path, _default) in krates("test") {

@@ -280,7 +280,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     for (krate, path, _default) in krates("rustc-main") {
         rules.build(&krate.build_step, path)
              .dep(|s| s.name("libtest-link"))
-             .dep(move |s| s.name("llvm").host(&build.config.build).stage(0))
+             .dep(move |s| s.name("llvm").host(&build.build).stage(0))
              .dep(|s| s.name("may-run-build-script"))
              .run(move |s| compile::rustc(build, s.target, &s.compiler()));
     }

@@ -291,8 +291,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.build("may-run-build-script", "path/to/nowhere")
          .dep(move |s| {
              s.name("libstd-link")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
          });
     rules.build("startup-objects", "src/rtstartup")
          .dep(|s| s.name("create-sysroot").target(s.host))

@@ -332,7 +332,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
                      "incremental");
     }

-    if build.config.build.contains("msvc") {
+    if build.build.contains("msvc") {
         // nothing to do for debuginfo tests
     } else {
         rules.test("check-debuginfo-lldb", "src/test/debuginfo-lldb")

@@ -352,7 +352,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
                                      "debuginfo-gdb", "debuginfo"));
         let mut rule = rules.test("check-debuginfo", "src/test/debuginfo");
         rule.default(true);
-        if build.config.build.contains("apple") {
+        if build.build.contains("apple") {
             rule.dep(|s| s.name("check-debuginfo-lldb"));
         } else {
             rule.dep(|s| s.name("check-debuginfo-gdb"));

@@ -594,8 +594,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
              // Cargo depends on procedural macros, which requires a full host
              // compiler to be available, so we need to depend on that.
              s.name("librustc-link")
-              .target(&build.config.build)
-              .host(&build.config.build)
+              .target(&build.build)
+              .host(&build.build)
          })
          .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
     rules.build("tool-rls", "src/tools/rls")

@@ -606,8 +606,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
          .dep(move |s| {
              // rls, like cargo, uses procedural macros
              s.name("librustc-link")
-              .target(&build.config.build)
-              .host(&build.config.build)
+              .target(&build.build)
+              .host(&build.build)
          })
          .run(move |s| compile::tool(build, s.stage, s.target, "rls"));

@@ -635,8 +635,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.doc("doc-book", "src/doc/book")
          .dep(move |s| {
              s.name("tool-rustbook")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
               .stage(0)
          })
          .default(build.config.docs)

@@ -644,8 +644,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.doc("doc-nomicon", "src/doc/nomicon")
          .dep(move |s| {
              s.name("tool-rustbook")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
               .stage(0)
          })
          .default(build.config.docs)

@@ -653,8 +653,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.doc("doc-reference", "src/doc/reference")
          .dep(move |s| {
              s.name("tool-rustbook")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
               .stage(0)
          })
          .default(build.config.docs)

@@ -662,8 +662,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.doc("doc-unstable-book", "src/doc/unstable-book")
          .dep(move |s| {
              s.name("tool-rustbook")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
               .stage(0)
          })
          .dep(move |s| s.name("doc-unstable-book-gen"))
@@ -675,14 +675,14 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.doc("doc-standalone", "src/doc")
          .dep(move |s| {
              s.name("rustc")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
               .stage(0)
          })
          .default(build.config.docs)
          .run(move |s| doc::standalone(build, s.target));
     rules.doc("doc-error-index", "src/tools/error_index_generator")
-         .dep(move |s| s.name("tool-error-index").target(&build.config.build).stage(0))
+         .dep(move |s| s.name("tool-error-index").target(&build.build).stage(0))
          .dep(move |s| s.name("librustc-link"))
          .default(build.config.docs)
          .host(true)

@@ -690,8 +690,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     rules.doc("doc-unstable-book-gen", "src/tools/unstable-book-gen")
          .dep(move |s| {
              s.name("tool-unstable-book-gen")
-              .host(&build.config.build)
-              .target(&build.config.build)
+              .host(&build.build)
+              .target(&build.build)
               .stage(0)
          })
          .dep(move |s| s.name("libstd-link"))

@@ -725,7 +725,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     // ========================================================================
     // Distribution targets
     rules.dist("dist-rustc", "src/librustc")
-         .dep(move |s| s.name("rustc").host(&build.config.build))
+         .dep(move |s| s.name("rustc").host(&build.build))
          .host(true)
          .only_host_build(true)
          .default(true)

@@ -811,7 +811,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
          .host(true)
          .only_build(true)
          .only_host_build(true)
-         .dep(move |s| s.name("tool-build-manifest").target(&build.config.build).stage(0))
+         .dep(move |s| s.name("tool-build-manifest").target(&build.build).stage(0))
          .run(move |_| dist::hash_and_sign(build));

     rules.install("install-docs", "src/doc")

@@ -861,8 +861,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
     /// Helper to depend on a stage0 build-only rust-installer tool.
     fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> {
         step.name("tool-rust-installer")
-            .host(&build.config.build)
-            .target(&build.config.build)
+            .host(&build.build)
+            .target(&build.build)
             .stage(0)
     }
 }

@@ -1058,8 +1058,8 @@ impl<'a> Rules<'a> {
             build: build,
             sbuild: Step {
                 stage: build.flags.stage.unwrap_or(2),
-                target: &build.config.build,
-                host: &build.config.build,
+                target: &build.build,
+                host: &build.build,
                 name: "",
             },
             rules: BTreeMap::new(),

@@ -1218,16 +1218,9 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?

         rules.into_iter().flat_map(|(rule, _)| {
             let hosts = if rule.only_host_build || rule.only_build {
-                &self.build.config.host[..1]
-            } else if self.build.flags.host.len() > 0 {
-                &self.build.flags.host
+                self.build.build_slice()
             } else {
-                &self.build.config.host
-            };
-            let targets = if self.build.flags.target.len() > 0 {
-                &self.build.flags.target
-            } else {
-                &self.build.config.target
+                &self.build.hosts
             };
             // Determine the actual targets participating in this rule.
             // NOTE: We should keep the full projection from build triple to

@@ -1236,19 +1229,18 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
             // the original non-shadowed hosts array is used below.
             let arr = if rule.host {
                 // If --target was specified but --host wasn't specified,
-                // don't run any host-only tests. Also, respect any `--host`
-                // overrides as done for `hosts`.
+                // don't run any host-only tests.
                 if self.build.flags.host.len() > 0 {
-                    &self.build.flags.host[..]
+                    &self.build.hosts
                 } else if self.build.flags.target.len() > 0 {
                     &[]
                 } else if rule.only_build {
-                    &self.build.config.host[..1]
+                    self.build.build_slice()
                 } else {
-                    &self.build.config.host[..]
+                    &self.build.hosts
                 }
             } else {
-                targets
+                &self.build.targets
             };

             hosts.iter().flat_map(move |host| {

@@ -1326,7 +1318,7 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
         for idx in 0..nodes.len() {
             self.topo_sort(idx, &idx_to_node, &edges, &mut visited, &mut order);
         }
-        return order
+        order
     }

     /// Builds the dependency graph rooted at `step`.

@@ -1365,7 +1357,7 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
         }

         edges.entry(idx).or_insert(HashSet::new()).extend(deps);
-        return idx
+        idx
     }

     /// Given a dependency graph with a finished list of `nodes`, fill out more
|
/// Given a dependency graph with a finished list of `nodes`, fill out more
|
||||||
|
@ -1494,8 +1486,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(plan.contains(&step.name("dist-docs")));
|
assert!(plan.contains(&step.name("dist-docs")));
|
||||||
|
@ -1517,8 +1509,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(plan.contains(&step.name("dist-docs")));
|
assert!(plan.contains(&step.name("dist-docs")));
|
||||||
|
@ -1545,8 +1537,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(!plan.iter().any(|s| s.host == "B"));
|
assert!(!plan.iter().any(|s| s.host == "B"));
|
||||||
|
@ -1575,8 +1567,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(!plan.iter().any(|s| s.host == "B"));
|
assert!(!plan.iter().any(|s| s.host == "B"));
|
||||||
|
@ -1612,8 +1604,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(!plan.iter().any(|s| s.target == "A"));
|
assert!(!plan.iter().any(|s| s.target == "A"));
|
||||||
|
@ -1639,8 +1631,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(!plan.iter().any(|s| s.target == "A"));
|
assert!(!plan.iter().any(|s| s.target == "A"));
|
||||||
|
@ -1683,8 +1675,8 @@ mod tests {
|
||||||
let step = super::Step {
|
let step = super::Step {
|
||||||
name: "",
|
name: "",
|
||||||
stage: 2,
|
stage: 2,
|
||||||
host: &build.config.build,
|
host: &build.build,
|
||||||
target: &build.config.build,
|
target: &build.build,
|
||||||
};
|
};
|
||||||
|
|
||||||
// rustc built for all four of (A, B) x (A, B)
|
// rustc built for all four of (A, B) x (A, B)
|
||||||
|
|
|
@ -14,7 +14,6 @@
|
||||||
//! not a lot of interesting happenings here unfortunately.
|
//! not a lot of interesting happenings here unfortunately.
|
||||||
|
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::ffi::OsString;
|
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::io::{self, Write};
|
use std::io::{self, Write};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
@ -32,16 +31,9 @@ pub fn staticlib(name: &str, target: &str) -> String {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Copies a file from `src` to `dst`, attempting to use hard links and then
|
/// Copies a file from `src` to `dst`
|
||||||
/// falling back to an actually filesystem copy if necessary.
|
|
||||||
pub fn copy(src: &Path, dst: &Path) {
|
pub fn copy(src: &Path, dst: &Path) {
|
||||||
// A call to `hard_link` will fail if `dst` exists, so remove it if it
|
|
||||||
// already exists so we can try to help `hard_link` succeed.
|
|
||||||
let _ = fs::remove_file(&dst);
|
let _ = fs::remove_file(&dst);
|
||||||
|
|
||||||
// Attempt to "easy copy" by creating a hard link (symlinks don't work on
|
|
||||||
// windows), but if that fails just fall back to a slow `copy` operation.
|
|
||||||
// let res = fs::hard_link(src, dst);
|
|
||||||
let res = fs::copy(src, dst);
|
let res = fs::copy(src, dst);
|
||||||
if let Err(e) = res {
|
if let Err(e) = res {
|
||||||
panic!("failed to copy `{}` to `{}`: {}", src.display(),
|
panic!("failed to copy `{}` to `{}`: {}", src.display(),
|
||||||
|
@ -149,8 +141,7 @@ pub fn dylib_path_var() -> &'static str {
|
||||||
/// Parses the `dylib_path_var()` environment variable, returning a list of
|
/// Parses the `dylib_path_var()` environment variable, returning a list of
|
||||||
/// paths that are members of this lookup path.
|
/// paths that are members of this lookup path.
|
||||||
pub fn dylib_path() -> Vec<PathBuf> {
|
pub fn dylib_path() -> Vec<PathBuf> {
|
||||||
env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
|
env::split_paths(&env::var_os(dylib_path_var()).unwrap_or_default()).collect()
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// `push` all components to `buf`. On windows, append `.exe` to the last component.
|
/// `push` all components to `buf`. On windows, append `.exe` to the last component.
|
||||||
|
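The `unwrap_or(OsString::new())` → `unwrap_or_default()` cleanup above doesn't change what `dylib_path` returns. As a standalone, hedged sketch (not part of this commit; the variable name is only an example), this is how such a lookup path is typically parsed and re-assembled with the same `std::env` helpers:

```rust
use std::env;
use std::path::PathBuf;

fn main() {
    // Example variable name only; build_helper picks the right one per target
    // (PATH on Windows, DYLD_LIBRARY_PATH on macOS, LD_LIBRARY_PATH elsewhere).
    let var = "LD_LIBRARY_PATH";

    // Parse the current value into individual paths, exactly as dylib_path does.
    let mut paths: Vec<PathBuf> =
        env::split_paths(&env::var_os(var).unwrap_or_default()).collect();

    // Prepend a directory and re-join the list into a single OS string.
    paths.insert(0, PathBuf::from("/tmp/extra-libs"));
    let joined = env::join_paths(paths).expect("path entry contained a separator");
    println!("{}={}", var, joined.to_string_lossy());
}
```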
@ -422,4 +413,4 @@ impl CiEnv {
|
||||||
cmd.env("TERM", "xterm").args(&["--color", "always"]);
|
cmd.env("TERM", "xterm").args(&["--color", "always"]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
The tracking issue for this feature is: [#40872]
|
The tracking issue for this feature is: [#40872]
|
||||||
|
|
||||||
[#29599]: https://github.com/rust-lang/rust/issues/40872
|
[#40872]: https://github.com/rust-lang/rust/issues/40872
|
||||||
|
|
||||||
------------------------
|
------------------------
|
||||||
|
|
||||||
|
|
|
@ -143,7 +143,8 @@ pub extern fn rust_eh_unwind_resume() {
|
||||||
#[no_mangle]
|
#[no_mangle]
|
||||||
pub extern fn rust_begin_panic(_msg: core::fmt::Arguments,
|
pub extern fn rust_begin_panic(_msg: core::fmt::Arguments,
|
||||||
_file: &'static str,
|
_file: &'static str,
|
||||||
_line: u32) -> ! {
|
_line: u32,
|
||||||
|
_column: u32) -> ! {
|
||||||
unsafe { intrinsics::abort() }
|
unsafe { intrinsics::abort() }
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
@ -187,7 +188,8 @@ pub extern fn rust_eh_unwind_resume() {
|
||||||
#[no_mangle]
|
#[no_mangle]
|
||||||
pub extern fn rust_begin_panic(_msg: core::fmt::Arguments,
|
pub extern fn rust_begin_panic(_msg: core::fmt::Arguments,
|
||||||
_file: &'static str,
|
_file: &'static str,
|
||||||
_line: u32) -> ! {
|
_line: u32,
|
||||||
|
_column: u32) -> ! {
|
||||||
unsafe { intrinsics::abort() }
|
unsafe { intrinsics::abort() }
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
|
@ -0,0 +1,27 @@
# `unsized_tuple_coercion`

The tracking issue for this feature is: [#42877]

[#42877]: https://github.com/rust-lang/rust/issues/42877

------------------------

This is a part of [RFC0401]. According to the RFC, there should be an implementation like this:

```rust
impl<..., T, U: ?Sized> Unsized<(..., U)> for (..., T) where T: Unsized<U> {}
```

This implementation is currently gated behind `#[feature(unsized_tuple_coercion)]` to avoid insta-stability. Therefore you can use it like this:

```rust
#![feature(unsized_tuple_coercion)]

fn main() {
    let x : ([i32; 3], [i32; 3]) = ([1, 2, 3], [4, 5, 6]);
    let y : &([i32; 3], [i32]) = &x;
    assert_eq!(y.1[0], 4);
}
```

[RFC0401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md
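The new page above documents the still-gated tuple case. For comparison, the analogous coercion for a struct whose last field is unsized is already stable; the sketch below is illustrative and not part of this commit:

```rust
// A struct whose *last* field is unsized behaves like the gated tuple case:
// a reference to the sized version coerces to a reference to the unsized one.
struct Pair<T: ?Sized> {
    label: u8,
    data: T,
}

fn main() {
    let concrete: Pair<[i32; 3]> = Pair { label: 1, data: [4, 5, 6] };
    let coerced: &Pair<[i32]> = &concrete;
    assert_eq!(coerced.data[0], 4);
    assert_eq!(coerced.label, 1);
}
```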
@ -0,0 +1,17 @@
# `iterator_for_each`

The tracking issue for this feature is: [#42986]

[#42986]: https://github.com/rust-lang/rust/issues/42986

------------------------

To call a closure on each element of an iterator, you can use `for_each`:

```rust
#![feature(iterator_for_each)]

fn main() {
    (0..10).for_each(|i| println!("{}", i));
}
```
@ -1,11 +0,0 @@
# `more_io_inner_methods`

The tracking issue for this feature is: [#41519]

[#41519]: https://github.com/rust-lang/rust/issues/41519

------------------------

This feature enables several internal accessor methods on structures in
`std::io` including `Take::{get_ref, get_mut}` and `Chain::{into_inner, get_ref,
get_mut}`.
@ -1,40 +0,0 @@
# `sort_unstable`

The tracking issue for this feature is: [#40585]

[#40585]: https://github.com/rust-lang/rust/issues/40585

------------------------

The default `sort` method on slices is stable. In other words, it guarantees
that the original order of equal elements is preserved after sorting. The
method has several undesirable characteristics:

1. It allocates a sizable chunk of memory.
2. If you don't need stability, it is not as performant as it could be.

An alternative is the new `sort_unstable` feature, which includes these
methods for sorting slices:

1. `sort_unstable`
2. `sort_unstable_by`
3. `sort_unstable_by_key`

Unstable sorting is generally faster and makes no allocations. The majority
of real-world sorting needs doesn't require stability, so these methods can
very often come in handy.

Another important difference is that `sort` lives in `libstd` and
`sort_unstable` lives in `libcore`. The reason is that the former makes
allocations and the latter doesn't.

A simple example:

```rust
#![feature(sort_unstable)]

let mut v = [-5, 4, 1, -3, 2];

v.sort_unstable();
assert!(v == [-5, -3, 1, 2, 4]);
```
@ -873,7 +873,7 @@ pub unsafe trait Alloc {
|
||||||
{
|
{
|
||||||
let k = Layout::new::<T>();
|
let k = Layout::new::<T>();
|
||||||
if k.size() > 0 {
|
if k.size() > 0 {
|
||||||
unsafe { self.alloc(k).map(|p|Unique::new(*p as *mut T)) }
|
unsafe { self.alloc(k).map(|p| Unique::new(p as *mut T)) }
|
||||||
} else {
|
} else {
|
||||||
Err(AllocErr::invalid_input("zero-sized type invalid for alloc_one"))
|
Err(AllocErr::invalid_input("zero-sized type invalid for alloc_one"))
|
||||||
}
|
}
|
||||||
|
|
|
@ -14,9 +14,9 @@
|
||||||
#![feature(rand)]
|
#![feature(rand)]
|
||||||
#![feature(repr_simd)]
|
#![feature(repr_simd)]
|
||||||
#![feature(slice_rotate)]
|
#![feature(slice_rotate)]
|
||||||
#![feature(sort_unstable)]
|
|
||||||
#![feature(test)]
|
#![feature(test)]
|
||||||
|
|
||||||
|
extern crate rand;
|
||||||
extern crate test;
|
extern crate test;
|
||||||
|
|
||||||
mod btree;
|
mod btree;
|
||||||
|
|
|
@ -8,9 +8,11 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
use std::{mem, ptr};
|
use std::__rand::{thread_rng};
|
||||||
use std::__rand::{Rng, thread_rng};
|
use std::mem;
|
||||||
|
use std::ptr;
|
||||||
|
|
||||||
|
use rand::{Rng, SeedableRng, XorShiftRng};
|
||||||
use test::{Bencher, black_box};
|
use test::{Bencher, black_box};
|
||||||
|
|
||||||
#[bench]
|
#[bench]
|
||||||
|
@ -191,17 +193,17 @@ fn gen_descending(len: usize) -> Vec<u64> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gen_random(len: usize) -> Vec<u64> {
|
fn gen_random(len: usize) -> Vec<u64> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
|
||||||
rng.gen_iter::<u64>().take(len).collect()
|
rng.gen_iter::<u64>().take(len).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gen_random_bytes(len: usize) -> Vec<u8> {
|
fn gen_random_bytes(len: usize) -> Vec<u8> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
|
||||||
rng.gen_iter::<u8>().take(len).collect()
|
rng.gen_iter::<u8>().take(len).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
|
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
|
||||||
let mut v = gen_ascending(len);
|
let mut v = gen_ascending(len);
|
||||||
for _ in (0usize..).take_while(|x| x * x <= len) {
|
for _ in (0usize..).take_while(|x| x * x <= len) {
|
||||||
let x = rng.gen::<usize>() % len;
|
let x = rng.gen::<usize>() % len;
|
||||||
|
@ -212,7 +214,7 @@ fn gen_mostly_ascending(len: usize) -> Vec<u64> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gen_mostly_descending(len: usize) -> Vec<u64> {
|
fn gen_mostly_descending(len: usize) -> Vec<u64> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
|
||||||
let mut v = gen_descending(len);
|
let mut v = gen_descending(len);
|
||||||
for _ in (0usize..).take_while(|x| x * x <= len) {
|
for _ in (0usize..).take_while(|x| x * x <= len) {
|
||||||
let x = rng.gen::<usize>() % len;
|
let x = rng.gen::<usize>() % len;
|
||||||
|
@ -223,7 +225,7 @@ fn gen_mostly_descending(len: usize) -> Vec<u64> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gen_strings(len: usize) -> Vec<String> {
|
fn gen_strings(len: usize) -> Vec<String> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
|
||||||
let mut v = vec![];
|
let mut v = vec![];
|
||||||
for _ in 0..len {
|
for _ in 0..len {
|
||||||
let n = rng.gen::<usize>() % 20 + 1;
|
let n = rng.gen::<usize>() % 20 + 1;
|
||||||
|
@ -233,7 +235,7 @@ fn gen_strings(len: usize) -> Vec<String> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
|
fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
|
||||||
rng.gen_iter().map(|x| [x; 16]).take(len).collect()
|
rng.gen_iter().map(|x| [x; 16]).take(len).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -241,18 +243,32 @@ macro_rules! sort {
|
||||||
($f:ident, $name:ident, $gen:expr, $len:expr) => {
|
($f:ident, $name:ident, $gen:expr, $len:expr) => {
|
||||||
#[bench]
|
#[bench]
|
||||||
fn $name(b: &mut Bencher) {
|
fn $name(b: &mut Bencher) {
|
||||||
b.iter(|| $gen($len).$f());
|
let v = $gen($len);
|
||||||
|
b.iter(|| v.clone().$f());
|
||||||
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
|
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
macro_rules! sort_strings {
|
||||||
|
($f:ident, $name:ident, $gen:expr, $len:expr) => {
|
||||||
|
#[bench]
|
||||||
|
fn $name(b: &mut Bencher) {
|
||||||
|
let v = $gen($len);
|
||||||
|
let v = v.iter().map(|s| &**s).collect::<Vec<&str>>();
|
||||||
|
b.iter(|| v.clone().$f());
|
||||||
|
b.bytes = $len * mem::size_of::<&str>() as u64;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
macro_rules! sort_expensive {
|
macro_rules! sort_expensive {
|
||||||
($f:ident, $name:ident, $gen:expr, $len:expr) => {
|
($f:ident, $name:ident, $gen:expr, $len:expr) => {
|
||||||
#[bench]
|
#[bench]
|
||||||
fn $name(b: &mut Bencher) {
|
fn $name(b: &mut Bencher) {
|
||||||
|
let v = $gen($len);
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let mut v = $gen($len);
|
let mut v = v.clone();
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
v.$f(|a: &u64, b: &u64| {
|
v.$f(|a: &u64, b: &u64| {
|
||||||
count += 1;
|
count += 1;
|
||||||
|
@ -263,7 +279,7 @@ macro_rules! sort_expensive {
|
||||||
});
|
});
|
||||||
black_box(count);
|
black_box(count);
|
||||||
});
|
});
|
||||||
b.bytes = $len as u64 * mem::size_of::<u64>() as u64;
|
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
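The rewritten `sort!` macro above builds the input once and only clones it inside `b.iter`, so data generation no longer pollutes the measurement. A hedged sketch of the same pattern outside the macro, assuming a nightly toolchain for the `test` crate and using a deterministic stand-in for this file's `XorShiftRng`-based generators:

```rust
#![feature(test)]
extern crate test;

use std::mem;
use test::Bencher;

/// Deterministic stand-in for the random generators used in this file.
fn gen_input(len: usize) -> Vec<u64> {
    (0..len as u64).map(|i| i.wrapping_mul(2_654_435_761) % 1_000).collect()
}

#[bench]
fn sort_large_random(b: &mut Bencher) {
    let v = gen_input(10_000);        // built once, outside the timed closure
    b.iter(|| v.clone().sort());      // only clone + sort is measured
    b.bytes = 10_000 * mem::size_of::<u64>() as u64;
}
```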
@ -271,30 +287,30 @@ macro_rules! sort_expensive {
|
||||||
sort!(sort, sort_small_ascending, gen_ascending, 10);
|
sort!(sort, sort_small_ascending, gen_ascending, 10);
|
||||||
sort!(sort, sort_small_descending, gen_descending, 10);
|
sort!(sort, sort_small_descending, gen_descending, 10);
|
||||||
sort!(sort, sort_small_random, gen_random, 10);
|
sort!(sort, sort_small_random, gen_random, 10);
|
||||||
sort!(sort, sort_small_big_random, gen_big_random, 10);
|
sort!(sort, sort_small_big, gen_big_random, 10);
|
||||||
sort!(sort, sort_medium_random, gen_random, 100);
|
sort!(sort, sort_medium_random, gen_random, 100);
|
||||||
sort!(sort, sort_large_ascending, gen_ascending, 10000);
|
sort!(sort, sort_large_ascending, gen_ascending, 10000);
|
||||||
sort!(sort, sort_large_descending, gen_descending, 10000);
|
sort!(sort, sort_large_descending, gen_descending, 10000);
|
||||||
sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000);
|
sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000);
|
||||||
sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000);
|
sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000);
|
||||||
sort!(sort, sort_large_random, gen_random, 10000);
|
sort!(sort, sort_large_random, gen_random, 10000);
|
||||||
sort!(sort, sort_large_big_random, gen_big_random, 10000);
|
sort!(sort, sort_large_big, gen_big_random, 10000);
|
||||||
sort!(sort, sort_large_strings, gen_strings, 10000);
|
sort_strings!(sort, sort_large_strings, gen_strings, 10000);
|
||||||
sort_expensive!(sort_by, sort_large_random_expensive, gen_random, 10000);
|
sort_expensive!(sort_by, sort_large_expensive, gen_random, 10000);
|
||||||
|
|
||||||
sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10);
|
sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10);
|
||||||
sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10);
|
sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10);
|
||||||
sort!(sort_unstable, sort_unstable_small_random, gen_random, 10);
|
sort!(sort_unstable, sort_unstable_small_random, gen_random, 10);
|
||||||
sort!(sort_unstable, sort_unstable_small_big_random, gen_big_random, 10);
|
sort!(sort_unstable, sort_unstable_small_big, gen_big_random, 10);
|
||||||
sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100);
|
sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100);
|
||||||
sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000);
|
sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000);
|
||||||
sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000);
|
sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000);
|
||||||
sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000);
|
sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000);
|
||||||
sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000);
|
sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000);
|
||||||
sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000);
|
sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000);
|
||||||
sort!(sort_unstable, sort_unstable_large_big_random, gen_big_random, 10000);
|
sort!(sort_unstable, sort_unstable_large_big, gen_big_random, 10000);
|
||||||
sort!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000);
|
sort_strings!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000);
|
||||||
sort_expensive!(sort_unstable_by, sort_unstable_large_random_expensive, gen_random, 10000);
|
sort_expensive!(sort_unstable_by, sort_unstable_large_expensive, gen_random, 10000);
|
||||||
|
|
||||||
macro_rules! reverse {
|
macro_rules! reverse {
|
||||||
($name:ident, $ty:ty, $f:expr) => {
|
($name:ident, $ty:ty, $f:expr) => {
|
||||||
|
|
|
@ -498,12 +498,10 @@ pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
|
||||||
|
|
||||||
use string;
|
use string;
|
||||||
|
|
||||||
/// The format function takes a precompiled format string and a list of
|
/// The `format` function takes an `Arguments` struct and returns the resulting
|
||||||
/// arguments, to return the resulting formatted string.
|
/// formatted string.
|
||||||
///
|
///
|
||||||
/// # Arguments
|
/// The `Arguments` instance can be created with the `format_args!` macro.
|
||||||
///
|
|
||||||
/// * args - a structure of arguments generated via the `format_args!` macro.
|
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
|
|
|
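With the reworded docs above, `format` simply renders an `Arguments` value produced by `format_args!`. Minimal usage via the stable `std::fmt::format` re-export:

```rust
use std::fmt;

fn main() {
    // `format_args!` captures the template and arguments without allocating;
    // `fmt::format` then renders them into a `String`.
    let s = fmt::format(format_args!("{} + {} = {}", 1, 2, 1 + 2));
    assert_eq!(s, "1 + 2 = 3");
}
```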
@ -83,7 +83,6 @@
|
||||||
#![cfg_attr(not(test), feature(core_float))]
|
#![cfg_attr(not(test), feature(core_float))]
|
||||||
#![cfg_attr(not(test), feature(exact_size_is_empty))]
|
#![cfg_attr(not(test), feature(exact_size_is_empty))]
|
||||||
#![cfg_attr(not(test), feature(slice_rotate))]
|
#![cfg_attr(not(test), feature(slice_rotate))]
|
||||||
#![cfg_attr(not(test), feature(sort_unstable))]
|
|
||||||
#![cfg_attr(not(test), feature(str_checked_slicing))]
|
#![cfg_attr(not(test), feature(str_checked_slicing))]
|
||||||
#![cfg_attr(test, feature(rand, test))]
|
#![cfg_attr(test, feature(rand, test))]
|
||||||
#![feature(allocator)]
|
#![feature(allocator)]
|
||||||
|
|
|
@ -1144,6 +1144,10 @@ impl<T> [T] {
|
||||||
///
|
///
|
||||||
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
|
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
|
||||||
///
|
///
|
||||||
|
/// When applicable, unstable sorting is preferred because it is generally faster than stable
|
||||||
|
/// sorting and it doesn't allocate auxiliary memory.
|
||||||
|
/// See [`sort_unstable`](#method.sort_unstable).
|
||||||
|
///
|
||||||
/// # Current implementation
|
/// # Current implementation
|
||||||
///
|
///
|
||||||
/// The current algorithm is an adaptive, iterative merge sort inspired by
|
/// The current algorithm is an adaptive, iterative merge sort inspired by
|
||||||
|
@ -1174,6 +1178,10 @@ impl<T> [T] {
|
||||||
///
|
///
|
||||||
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
|
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
|
||||||
///
|
///
|
||||||
|
/// When applicable, unstable sorting is preferred because it is generally faster than stable
|
||||||
|
/// sorting and it doesn't allocate auxiliary memory.
|
||||||
|
/// See [`sort_unstable_by`](#method.sort_unstable_by).
|
||||||
|
///
|
||||||
/// # Current implementation
|
/// # Current implementation
|
||||||
///
|
///
|
||||||
/// The current algorithm is an adaptive, iterative merge sort inspired by
|
/// The current algorithm is an adaptive, iterative merge sort inspired by
|
||||||
|
@ -1207,6 +1215,10 @@ impl<T> [T] {
|
||||||
///
|
///
|
||||||
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
|
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
|
||||||
///
|
///
|
||||||
|
/// When applicable, unstable sorting is preferred because it is generally faster than stable
|
||||||
|
/// sorting and it doesn't allocate auxiliary memory.
|
||||||
|
/// See [`sort_unstable_by_key`](#method.sort_unstable_by_key).
|
||||||
|
///
|
||||||
/// # Current implementation
|
/// # Current implementation
|
||||||
///
|
///
|
||||||
/// The current algorithm is an adaptive, iterative merge sort inspired by
|
/// The current algorithm is an adaptive, iterative merge sort inspired by
|
||||||
|
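The paragraphs added above point readers at the unstable variants when stability isn't needed. A small comparison of the two (both calls compile on current stable Rust; at the time of this commit `sort_unstable*` was just being stabilized):

```rust
fn main() {
    let mut stable = vec![(2, 'b'), (1, 'a'), (2, 'a'), (1, 'b')];
    let mut unstable = stable.clone();

    // Stable sort: elements with equal keys keep their original relative order.
    stable.sort_by_key(|&(k, _)| k);
    assert_eq!(stable, [(1, 'a'), (1, 'b'), (2, 'b'), (2, 'a')]);

    // Unstable sort: equal keys end up grouped, but their relative order within
    // a group is unspecified. Generally faster and allocation-free.
    unstable.sort_unstable_by_key(|&(k, _)| k);
    assert!(unstable.windows(2).all(|w| w[0].0 <= w[1].0));
}
```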
@ -1251,8 +1263,6 @@ impl<T> [T] {
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// #![feature(sort_unstable)]
|
|
||||||
///
|
|
||||||
/// let mut v = [-5, 4, 1, -3, 2];
|
/// let mut v = [-5, 4, 1, -3, 2];
|
||||||
///
|
///
|
||||||
/// v.sort_unstable();
|
/// v.sort_unstable();
|
||||||
|
@ -1260,8 +1270,7 @@ impl<T> [T] {
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
/// [pdqsort]: https://github.com/orlp/pdqsort
|
/// [pdqsort]: https://github.com/orlp/pdqsort
|
||||||
// FIXME #40585: Mention `sort_unstable` in the documentation for `sort`.
|
#[stable(feature = "sort_unstable", since = "1.20.0")]
|
||||||
#[unstable(feature = "sort_unstable", issue = "40585")]
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn sort_unstable(&mut self)
|
pub fn sort_unstable(&mut self)
|
||||||
where T: Ord
|
where T: Ord
|
||||||
|
@ -1288,8 +1297,6 @@ impl<T> [T] {
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// #![feature(sort_unstable)]
|
|
||||||
///
|
|
||||||
/// let mut v = [5, 4, 1, 3, 2];
|
/// let mut v = [5, 4, 1, 3, 2];
|
||||||
/// v.sort_unstable_by(|a, b| a.cmp(b));
|
/// v.sort_unstable_by(|a, b| a.cmp(b));
|
||||||
/// assert!(v == [1, 2, 3, 4, 5]);
|
/// assert!(v == [1, 2, 3, 4, 5]);
|
||||||
|
@ -1300,8 +1307,7 @@ impl<T> [T] {
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
/// [pdqsort]: https://github.com/orlp/pdqsort
|
/// [pdqsort]: https://github.com/orlp/pdqsort
|
||||||
// FIXME #40585: Mention `sort_unstable_by` in the documentation for `sort_by`.
|
#[stable(feature = "sort_unstable", since = "1.20.0")]
|
||||||
#[unstable(feature = "sort_unstable", issue = "40585")]
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn sort_unstable_by<F>(&mut self, compare: F)
|
pub fn sort_unstable_by<F>(&mut self, compare: F)
|
||||||
where F: FnMut(&T, &T) -> Ordering
|
where F: FnMut(&T, &T) -> Ordering
|
||||||
|
@ -1328,8 +1334,6 @@ impl<T> [T] {
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// #![feature(sort_unstable)]
|
|
||||||
///
|
|
||||||
/// let mut v = [-5i32, 4, 1, -3, 2];
|
/// let mut v = [-5i32, 4, 1, -3, 2];
|
||||||
///
|
///
|
||||||
/// v.sort_unstable_by_key(|k| k.abs());
|
/// v.sort_unstable_by_key(|k| k.abs());
|
||||||
|
@ -1337,8 +1341,7 @@ impl<T> [T] {
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
/// [pdqsort]: https://github.com/orlp/pdqsort
|
/// [pdqsort]: https://github.com/orlp/pdqsort
|
||||||
// FIXME #40585: Mention `sort_unstable_by_key` in the documentation for `sort_by_key`.
|
#[stable(feature = "sort_unstable", since = "1.20.0")]
|
||||||
#[unstable(feature = "sort_unstable", issue = "40585")]
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn sort_unstable_by_key<B, F>(&mut self, f: F)
|
pub fn sort_unstable_by_key<B, F>(&mut self, f: F)
|
||||||
where F: FnMut(&T) -> B,
|
where F: FnMut(&T) -> B,
|
||||||
|
@ -1794,7 +1797,7 @@ unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
|
||||||
|
|
||||||
impl<T> Drop for MergeHole<T> {
|
impl<T> Drop for MergeHole<T> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
// `T` is not a zero-sized type, so it's okay to divide by it's size.
|
// `T` is not a zero-sized type, so it's okay to divide by its size.
|
||||||
let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
|
let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
|
||||||
unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); }
|
unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); }
|
||||||
}
|
}
|
||||||
|
@ -1908,7 +1911,7 @@ fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
|
||||||
// if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
|
// if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
|
||||||
// algorithm should continue building a new run instead, `None` is returned.
|
// algorithm should continue building a new run instead, `None` is returned.
|
||||||
//
|
//
|
||||||
// TimSort is infamous for it's buggy implementations, as described here:
|
// TimSort is infamous for its buggy implementations, as described here:
|
||||||
// http://envisage-project.eu/timsort-specification-and-verification/
|
// http://envisage-project.eu/timsort-specification-and-verification/
|
||||||
//
|
//
|
||||||
// The gist of the story is: we must enforce the invariants on the top four runs on the stack.
|
// The gist of the story is: we must enforce the invariants on the top four runs on the stack.
|
||||||
|
|
|
@ -2008,10 +2008,10 @@ impl From<Box<str>> for String {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "box_from_str", since = "1.18.0")]
|
#[stable(feature = "box_from_str", since = "1.20.0")]
|
||||||
impl Into<Box<str>> for String {
|
impl From<String> for Box<str> {
|
||||||
fn into(self) -> Box<str> {
|
fn from(s: String) -> Box<str> {
|
||||||
self.into_boxed_str()
|
s.into_boxed_str()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
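The replacement of `Into<Box<str>> for String` with `From<String> for Box<str>` above keeps caller-side code unchanged, since `.into()` resolves through `From` in both directions:

```rust
fn main() {
    let s = String::from("hello");

    // Uses the new From<String> for Box<str> impl.
    let boxed: Box<str> = s.into();
    assert_eq!(&*boxed, "hello");

    // The reverse direction, From<Box<str>> for String, already existed.
    let back: String = boxed.into();
    assert_eq!(back, "hello");
}
```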
@ -396,18 +396,44 @@ fn test_sort() {
|
||||||
let mut rng = thread_rng();
|
let mut rng = thread_rng();
|
||||||
|
|
||||||
for len in (2..25).chain(500..510) {
|
for len in (2..25).chain(500..510) {
|
||||||
for _ in 0..100 {
|
for &modulus in &[5, 10, 100, 1000] {
|
||||||
let mut v: Vec<_> = rng.gen_iter::<i32>().take(len).collect();
|
for _ in 0..10 {
|
||||||
let mut v1 = v.clone();
|
let orig: Vec<_> = rng.gen_iter::<i32>()
|
||||||
|
.map(|x| x % modulus)
|
||||||
|
.take(len)
|
||||||
|
.collect();
|
||||||
|
|
||||||
v.sort();
|
// Sort in default order.
|
||||||
assert!(v.windows(2).all(|w| w[0] <= w[1]));
|
let mut v = orig.clone();
|
||||||
|
v.sort();
|
||||||
|
assert!(v.windows(2).all(|w| w[0] <= w[1]));
|
||||||
|
|
||||||
v1.sort_by(|a, b| a.cmp(b));
|
// Sort in ascending order.
|
||||||
assert!(v1.windows(2).all(|w| w[0] <= w[1]));
|
let mut v = orig.clone();
|
||||||
|
v.sort_by(|a, b| a.cmp(b));
|
||||||
|
assert!(v.windows(2).all(|w| w[0] <= w[1]));
|
||||||
|
|
||||||
v1.sort_by(|a, b| b.cmp(a));
|
// Sort in descending order.
|
||||||
assert!(v1.windows(2).all(|w| w[0] >= w[1]));
|
let mut v = orig.clone();
|
||||||
|
v.sort_by(|a, b| b.cmp(a));
|
||||||
|
assert!(v.windows(2).all(|w| w[0] >= w[1]));
|
||||||
|
|
||||||
|
// Sort with many pre-sorted runs.
|
||||||
|
let mut v = orig.clone();
|
||||||
|
v.sort();
|
||||||
|
v.reverse();
|
||||||
|
for _ in 0..5 {
|
||||||
|
let a = rng.gen::<usize>() % len;
|
||||||
|
let b = rng.gen::<usize>() % len;
|
||||||
|
if a < b {
|
||||||
|
v[a..b].reverse();
|
||||||
|
} else {
|
||||||
|
v.swap(a, b);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
v.sort();
|
||||||
|
assert!(v.windows(2).all(|w| w[0] <= w[1]));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -274,6 +274,11 @@ fn test_dedup_by() {
|
||||||
vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
|
vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
|
||||||
|
|
||||||
assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
|
assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
|
||||||
|
|
||||||
|
let mut vec = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)];
|
||||||
|
vec.dedup_by(|a, b| a.0 == b.0 && { b.1 += a.1; true });
|
||||||
|
|
||||||
|
assert_eq!(vec, [("foo", 3), ("bar", 12)]);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -510,8 +510,7 @@ fn test_from_iter() {
|
||||||
let u: Vec<_> = deq.iter().cloned().collect();
|
let u: Vec<_> = deq.iter().cloned().collect();
|
||||||
assert_eq!(u, v);
|
assert_eq!(u, v);
|
||||||
|
|
||||||
// FIXME #27741: Remove `.skip(0)` when Range::step_by is fully removed
|
let seq = (0..).step_by(2).take(256);
|
||||||
let seq = (0..).skip(0).step_by(2).take(256);
|
|
||||||
let deq: VecDeque<_> = seq.collect();
|
let deq: VecDeque<_> = seq.collect();
|
||||||
for (i, &x) in deq.iter().enumerate() {
|
for (i, &x) in deq.iter().enumerate() {
|
||||||
assert_eq!(2 * i, x);
|
assert_eq!(2 * i, x);
|
||||||
|
|
|
@ -222,7 +222,7 @@ use Bound::{Excluded, Included, Unbounded};
|
||||||
/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
|
/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
|
||||||
/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
|
/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
|
||||||
/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
|
/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
|
||||||
/// if [`mem::size_of::<T>`]` * capacity() > 0`. In general, `Vec`'s allocation
|
/// if [`mem::size_of::<T>`]`() * capacity() > 0`. In general, `Vec`'s allocation
|
||||||
/// details are subtle enough that it is strongly recommended that you only
|
/// details are subtle enough that it is strongly recommended that you only
|
||||||
/// free memory allocated by a `Vec` by creating a new `Vec` and dropping it.
|
/// free memory allocated by a `Vec` by creating a new `Vec` and dropping it.
|
||||||
///
|
///
|
||||||
|
@ -823,7 +823,8 @@ impl<T> Vec<T> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Removes consecutive elements in the vector that resolve to the same key.
|
/// Removes all but the first of consecutive elements in the vector that resolve to the same
|
||||||
|
/// key.
|
||||||
///
|
///
|
||||||
/// If the vector is sorted, this removes all duplicates.
|
/// If the vector is sorted, this removes all duplicates.
|
||||||
///
|
///
|
||||||
|
@ -842,11 +843,13 @@ impl<T> Vec<T> {
|
||||||
self.dedup_by(|a, b| key(a) == key(b))
|
self.dedup_by(|a, b| key(a) == key(b))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Removes consecutive elements in the vector according to a predicate.
|
/// Removes all but the first of consecutive elements in the vector satisfying a given equality
|
||||||
|
/// relation.
|
||||||
///
|
///
|
||||||
/// The `same_bucket` function is passed references to two elements from the vector, and
|
/// The `same_bucket` function is passed references to two elements from the vector, and
|
||||||
/// returns `true` if the elements compare equal, or `false` if they do not. Only the first
|
/// returns `true` if the elements compare equal, or `false` if they do not. The elements are
|
||||||
/// of adjacent equal items is kept.
|
/// passed in opposite order from their order in the vector, so if `same_bucket(a, b)` returns
|
||||||
|
/// `true`, `a` is removed.
|
||||||
///
|
///
|
||||||
/// If the vector is sorted, this removes all duplicates.
|
/// If the vector is sorted, this removes all duplicates.
|
||||||
///
|
///
|
||||||
|
|
|
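The clarified wording above pins down the argument order of `same_bucket`: `a` is the later element, `b` the earlier one that is kept, and returning `true` removes `a`. The test added elsewhere in this commit exercises exactly that; as a standalone example:

```rust
fn main() {
    let mut v = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)];

    // `a` is the later element, `b` the earlier one that stays; when the keys
    // match we fold `a`'s count into `b` and return true so `a` is removed.
    v.dedup_by(|a, b| a.0 == b.0 && { b.1 += a.1; true });

    assert_eq!(v, [("foo", 3), ("bar", 12)]);
}
```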
@ -137,8 +137,6 @@ fn main() {
|
||||||
cmd.arg("--enable-debug");
|
cmd.arg("--enable-debug");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Turn off broken quarantine (see jemalloc/jemalloc#161)
|
|
||||||
cmd.arg("--disable-fill");
|
|
||||||
cmd.arg(format!("--host={}", build_helper::gnu_target(&target)));
|
cmd.arg(format!("--host={}", build_helper::gnu_target(&target)));
|
||||||
cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
|
cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
|
||||||
|
|
||||||
|
|
|
@ -99,3 +99,50 @@ fn bench_zip_add(b: &mut Bencher) {
|
||||||
add_zip(&source, &mut dst)
|
add_zip(&source, &mut dst)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// `Iterator::for_each` implemented as a plain loop.
|
||||||
|
fn for_each_loop<I, F>(iter: I, mut f: F) where
|
||||||
|
I: Iterator, F: FnMut(I::Item)
|
||||||
|
{
|
||||||
|
for item in iter {
|
||||||
|
f(item);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `Iterator::for_each` implemented with `fold` for internal iteration.
|
||||||
|
/// (except when `by_ref()` effectively disables that optimization.)
|
||||||
|
fn for_each_fold<I, F>(iter: I, mut f: F) where
|
||||||
|
I: Iterator, F: FnMut(I::Item)
|
||||||
|
{
|
||||||
|
iter.fold((), move |(), item| f(item));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn bench_for_each_chain_loop(b: &mut Bencher) {
|
||||||
|
b.iter(|| {
|
||||||
|
let mut acc = 0;
|
||||||
|
let iter = (0i64..1000000).chain(0..1000000).map(black_box);
|
||||||
|
for_each_loop(iter, |x| acc += x);
|
||||||
|
acc
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn bench_for_each_chain_fold(b: &mut Bencher) {
|
||||||
|
b.iter(|| {
|
||||||
|
let mut acc = 0;
|
||||||
|
let iter = (0i64..1000000).chain(0..1000000).map(black_box);
|
||||||
|
for_each_fold(iter, |x| acc += x);
|
||||||
|
acc
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn bench_for_each_chain_ref_fold(b: &mut Bencher) {
|
||||||
|
b.iter(|| {
|
||||||
|
let mut acc = 0;
|
||||||
|
let mut iter = (0i64..1000000).chain(0..1000000).map(black_box);
|
||||||
|
for_each_fold(iter.by_ref(), |x| acc += x);
|
||||||
|
acc
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
|
@ -335,7 +335,6 @@ impl Ordering {
|
||||||
/// Example usage:
|
/// Example usage:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// #![feature(reverse_cmp_key)]
|
|
||||||
/// use std::cmp::Reverse;
|
/// use std::cmp::Reverse;
|
||||||
///
|
///
|
||||||
/// let mut v = vec![1, 2, 3, 4, 5, 6];
|
/// let mut v = vec![1, 2, 3, 4, 5, 6];
|
||||||
|
@ -343,10 +342,10 @@ impl Ordering {
|
||||||
/// assert_eq!(v, vec![3, 2, 1, 6, 5, 4]);
|
/// assert_eq!(v, vec![3, 2, 1, 6, 5, 4]);
|
||||||
/// ```
|
/// ```
|
||||||
#[derive(PartialEq, Eq, Debug)]
|
#[derive(PartialEq, Eq, Debug)]
|
||||||
#[unstable(feature = "reverse_cmp_key", issue = "40893")]
|
#[stable(feature = "reverse_cmp_key", since = "1.19.0")]
|
||||||
pub struct Reverse<T>(pub T);
|
pub struct Reverse<T>(#[stable(feature = "reverse_cmp_key", since = "1.19.0")] pub T);
|
||||||
|
|
||||||
#[unstable(feature = "reverse_cmp_key", issue = "40893")]
|
#[stable(feature = "reverse_cmp_key", since = "1.19.0")]
|
||||||
impl<T: PartialOrd> PartialOrd for Reverse<T> {
|
impl<T: PartialOrd> PartialOrd for Reverse<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn partial_cmp(&self, other: &Reverse<T>) -> Option<Ordering> {
|
fn partial_cmp(&self, other: &Reverse<T>) -> Option<Ordering> {
|
||||||
|
@ -363,7 +362,7 @@ impl<T: PartialOrd> PartialOrd for Reverse<T> {
|
||||||
fn gt(&self, other: &Self) -> bool { other.0 > self.0 }
|
fn gt(&self, other: &Self) -> bool { other.0 > self.0 }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[unstable(feature = "reverse_cmp_key", issue = "40893")]
|
#[stable(feature = "reverse_cmp_key", since = "1.19.0")]
|
||||||
impl<T: Ord> Ord for Reverse<T> {
|
impl<T: Ord> Ord for Reverse<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn cmp(&self, other: &Reverse<T>) -> Ordering {
|
fn cmp(&self, other: &Reverse<T>) -> Ordering {
|
||||||
|
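With `Reverse` stabilized as above, the two common uses are flipping a single sort key and turning `BinaryHeap` into a min-heap; a short sketch:

```rust
use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    // Sort in descending order by wrapping the key in Reverse.
    let mut v = vec![3, 1, 2];
    v.sort_by_key(|&x| Reverse(x));
    assert_eq!(v, [3, 2, 1]);

    // BinaryHeap is a max-heap; storing Reverse(T) makes it pop the smallest first.
    let mut heap: BinaryHeap<Reverse<i32>> = [3, 1, 2].iter().map(|&x| Reverse(x)).collect();
    assert_eq!(heap.pop(), Some(Reverse(1)));
}
```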
@ -380,8 +379,9 @@ impl<T: Ord> Ord for Reverse<T> {
|
||||||
///
|
///
|
||||||
/// ## Derivable
|
/// ## Derivable
|
||||||
///
|
///
|
||||||
/// This trait can be used with `#[derive]`. When `derive`d, it will produce a lexicographic
|
/// This trait can be used with `#[derive]`. When `derive`d on structs, it will produce a
|
||||||
/// ordering based on the top-to-bottom declaration order of the struct's members.
|
/// lexicographic ordering based on the top-to-bottom declaration order of the struct's members.
|
||||||
|
/// When `derive`d on enums, variants are ordered by their top-to-bottom declaration order.
|
||||||
///
|
///
|
||||||
/// ## How can I implement `Ord`?
|
/// ## How can I implement `Ord`?
|
||||||
///
|
///
|
||||||
|
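The expanded wording above distinguishes the struct case (lexicographic in field declaration order) from the enum case (variant declaration order). An illustrative sketch:

```rust
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Version {
    major: u32, // compared first
    minor: u32, // only consulted when `major` ties
}

#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Level {
    Low,    // smallest: declared first
    Medium,
    High,
}

fn main() {
    assert!(Version { major: 1, minor: 9 } < Version { major: 2, minor: 0 });
    assert!(Level::Low < Level::High && Level::Medium < Level::High);
}
```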
@ -513,8 +513,9 @@ impl PartialOrd for Ordering {
|
||||||
///
|
///
|
||||||
/// ## Derivable
|
/// ## Derivable
|
||||||
///
|
///
|
||||||
/// This trait can be used with `#[derive]`. When `derive`d, it will produce a lexicographic
|
/// This trait can be used with `#[derive]`. When `derive`d on structs, it will produce a
|
||||||
/// ordering based on the top-to-bottom declaration order of the struct's members.
|
/// lexicographic ordering based on the top-to-bottom declaration order of the struct's members.
|
||||||
|
/// When `derive`d on enums, variants are ordered by their top-to-bottom declaration order.
|
||||||
///
|
///
|
||||||
/// ## How can I implement `PartialOrd`?
|
/// ## How can I implement `PartialOrd`?
|
||||||
///
|
///
|
||||||
|
|
|
@ -49,9 +49,37 @@ impl<'a, 'b: 'a> fmt::Write for PadAdapter<'a, 'b> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A struct to help with `fmt::Debug` implementations.
|
/// A struct to help with [`fmt::Debug`](trait.Debug.html) implementations.
|
||||||
///
|
///
|
||||||
/// Constructed by the `Formatter::debug_struct` method.
|
/// This is useful when you wish to output a formatted struct as a part of your
|
||||||
|
/// [`Debug::fmt`](trait.Debug.html#tymethod.fmt) implementation.
|
||||||
|
///
|
||||||
|
/// This can be constructed by the
|
||||||
|
/// [`Formatter::debug_struct`](struct.Formatter.html#method.debug_struct)
|
||||||
|
/// method.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::fmt;
|
||||||
|
///
|
||||||
|
/// struct Foo {
|
||||||
|
/// bar: i32,
|
||||||
|
/// baz: String,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// impl fmt::Debug for Foo {
|
||||||
|
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
/// fmt.debug_struct("Foo")
|
||||||
|
/// .field("bar", &self.bar)
|
||||||
|
/// .field("baz", &self.baz)
|
||||||
|
/// .finish()
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// // prints "Foo { bar: 10, baz: "Hello World" }"
|
||||||
|
/// println!("{:?}", Foo { bar: 10, baz: "Hello World".to_string() });
|
||||||
|
/// ```
|
||||||
#[must_use]
|
#[must_use]
|
||||||
#[allow(missing_debug_implementations)]
|
#[allow(missing_debug_implementations)]
|
||||||
#[stable(feature = "debug_builders", since = "1.2.0")]
|
#[stable(feature = "debug_builders", since = "1.2.0")]
|
||||||
|
@ -116,9 +144,34 @@ impl<'a, 'b: 'a> DebugStruct<'a, 'b> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A struct to help with `fmt::Debug` implementations.
|
/// A struct to help with [`fmt::Debug`](trait.Debug.html) implementations.
|
||||||
///
|
///
|
||||||
/// Constructed by the `Formatter::debug_tuple` method.
|
/// This is useful when you wish to output a formatted tuple as a part of your
|
||||||
|
/// [`Debug::fmt`](trait.Debug.html#tymethod.fmt) implementation.
|
||||||
|
///
|
||||||
|
/// This can be constructed by the
|
||||||
|
/// [`Formatter::debug_tuple`](struct.Formatter.html#method.debug_tuple)
|
||||||
|
/// method.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::fmt;
|
||||||
|
///
|
||||||
|
/// struct Foo(i32, String);
|
||||||
|
///
|
||||||
|
/// impl fmt::Debug for Foo {
|
||||||
|
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
/// fmt.debug_tuple("Foo")
|
||||||
|
/// .field(&self.0)
|
||||||
|
/// .field(&self.1)
|
||||||
|
/// .finish()
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// // prints "Foo(10, "Hello World")"
|
||||||
|
/// println!("{:?}", Foo(10, "Hello World".to_string()));
|
||||||
|
/// ```
|
||||||
#[must_use]
|
#[must_use]
|
||||||
#[allow(missing_debug_implementations)]
|
#[allow(missing_debug_implementations)]
|
||||||
#[stable(feature = "debug_builders", since = "1.2.0")]
|
#[stable(feature = "debug_builders", since = "1.2.0")]
|
||||||
|
@ -228,9 +281,31 @@ impl<'a, 'b: 'a> DebugInner<'a, 'b> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A struct to help with `fmt::Debug` implementations.
|
/// A struct to help with [`fmt::Debug`](trait.Debug.html) implementations.
|
||||||
///
|
///
|
||||||
/// Constructed by the `Formatter::debug_set` method.
|
/// This is useful when you wish to output a formatted set of items as a part
|
||||||
|
/// of your [`Debug::fmt`](trait.Debug.html#tymethod.fmt) implementation.
|
||||||
|
///
|
||||||
|
/// This can be constructed by the
|
||||||
|
/// [`Formatter::debug_set`](struct.Formatter.html#method.debug_set)
|
||||||
|
/// method.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::fmt;
|
||||||
|
///
|
||||||
|
/// struct Foo(Vec<i32>);
|
||||||
|
///
|
||||||
|
/// impl fmt::Debug for Foo {
|
||||||
|
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
/// fmt.debug_set().entries(self.0.iter()).finish()
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// // prints "{10, 11}"
|
||||||
|
/// println!("{:?}", Foo(vec![10, 11]));
|
||||||
|
/// ```
|
||||||
#[must_use]
|
#[must_use]
|
||||||
#[allow(missing_debug_implementations)]
|
#[allow(missing_debug_implementations)]
|
||||||
#[stable(feature = "debug_builders", since = "1.2.0")]
|
#[stable(feature = "debug_builders", since = "1.2.0")]
|
||||||
|
@ -277,9 +352,31 @@ impl<'a, 'b: 'a> DebugSet<'a, 'b> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A struct to help with `fmt::Debug` implementations.
|
/// A struct to help with [`fmt::Debug`](trait.Debug.html) implementations.
|
||||||
///
|
///
|
||||||
/// Constructed by the `Formatter::debug_list` method.
|
/// This is useful when you wish to output a formatted list of items as a part
|
||||||
|
/// of your [`Debug::fmt`](trait.Debug.html#tymethod.fmt) implementation.
|
||||||
|
///
|
||||||
|
/// This can be constructed by the
|
||||||
|
/// [`Formatter::debug_list`](struct.Formatter.html#method.debug_list)
|
||||||
|
/// method.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::fmt;
|
||||||
|
///
|
||||||
|
/// struct Foo(Vec<i32>);
|
||||||
|
///
|
||||||
|
/// impl fmt::Debug for Foo {
|
||||||
|
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
/// fmt.debug_list().entries(self.0.iter()).finish()
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// // prints "[10, 11]"
|
||||||
|
/// println!("{:?}", Foo(vec![10, 11]));
|
||||||
|
/// ```
|
||||||
#[must_use]
|
#[must_use]
|
||||||
#[allow(missing_debug_implementations)]
|
#[allow(missing_debug_implementations)]
|
||||||
#[stable(feature = "debug_builders", since = "1.2.0")]
|
#[stable(feature = "debug_builders", since = "1.2.0")]
|
||||||
|
@ -326,9 +423,31 @@ impl<'a, 'b: 'a> DebugList<'a, 'b> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A struct to help with `fmt::Debug` implementations.
|
/// A struct to help with [`fmt::Debug`](trait.Debug.html) implementations.
|
||||||
///
|
///
|
||||||
/// Constructed by the `Formatter::debug_map` method.
|
/// This is useful when you wish to output a formatted map as a part of your
|
||||||
|
/// [`Debug::fmt`](trait.Debug.html#tymethod.fmt) implementation.
|
||||||
|
///
|
||||||
|
/// This can be constructed by the
|
||||||
|
/// [`Formatter::debug_map`](struct.Formatter.html#method.debug_map)
|
||||||
|
/// method.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::fmt;
|
||||||
|
///
|
||||||
|
/// struct Foo(Vec<(String, i32)>);
|
||||||
|
///
|
||||||
|
/// impl fmt::Debug for Foo {
|
||||||
|
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
/// fmt.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (k, v))).finish()
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// // prints "{"A": 10, "B": 11}"
|
||||||
|
/// println!("{:?}", Foo(vec![("A".to_string(), 10), ("B".to_string(), 11)]));
|
||||||
|
/// ```
|
||||||
#[must_use]
|
#[must_use]
|
||||||
#[allow(missing_debug_implementations)]
|
#[allow(missing_debug_implementations)]
|
||||||
#[stable(feature = "debug_builders", since = "1.2.0")]
|
#[stable(feature = "debug_builders", since = "1.2.0")]
|
||||||
|
|
|
@ -897,14 +897,11 @@ pub trait UpperExp {
|
||||||
fn fmt(&self, f: &mut Formatter) -> Result;
|
fn fmt(&self, f: &mut Formatter) -> Result;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The `write` function takes an output stream, a precompiled format string,
|
/// The `write` function takes an output stream, and an `Arguments` struct
|
||||||
/// and a list of arguments. The arguments will be formatted according to the
|
/// that can be precompiled with the `format_args!` macro.
|
||||||
/// specified format string into the output stream provided.
|
|
||||||
///
|
///
|
||||||
/// # Arguments
|
/// The arguments will be formatted according to the specified format string
|
||||||
///
|
/// into the output stream provided.
|
||||||
/// * output - the buffer to write output to
|
|
||||||
/// * args - the precompiled arguments generated by `format_args!`
|
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
|
|
|
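Per the reworded docs above, `fmt::write` takes any `fmt::Write` sink plus an `Arguments` value built by `format_args!`. Minimal usage with a `String` as the sink:

```rust
use std::fmt::{self, Write};

fn main() {
    // `String` implements `fmt::Write`, so it can serve as the output stream.
    let mut out = String::new();
    fmt::write(&mut out, format_args!("{} {}!", "hello", "world")).unwrap();
    write!(out, " ({})", 2017).unwrap();
    assert_eq!(out, "hello world! (2017)");
}
```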
@ -482,6 +482,53 @@ pub trait Iterator {
|
||||||
Map{iter: self, f: f}
|
Map{iter: self, f: f}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Calls a closure on each element of an iterator.
|
||||||
|
///
|
||||||
|
/// This is equivalent to using a [`for`] loop on the iterator, although
|
||||||
|
/// `break` and `continue` are not possible from a closure. It's generally
|
||||||
|
/// more idiomatic to use a `for` loop, but `for_each` may be more legible
|
||||||
|
/// when processing items at the end of longer iterator chains. In some
|
||||||
|
/// cases `for_each` may also be faster than a loop, because it will use
|
||||||
|
/// internal iteration on adaptors like `Chain`.
|
||||||
|
///
|
||||||
|
/// [`for`]: ../../book/first-edition/loops.html#for
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Basic usage:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// #![feature(iterator_for_each)]
|
||||||
|
///
|
||||||
|
/// use std::sync::mpsc::channel;
|
||||||
|
///
|
||||||
|
/// let (tx, rx) = channel();
|
||||||
|
/// (0..5).map(|x| x * 2 + 1)
|
||||||
|
/// .for_each(move |x| tx.send(x).unwrap());
|
||||||
|
///
|
||||||
|
/// let v: Vec<_> = rx.iter().collect();
|
||||||
|
/// assert_eq!(v, vec![1, 3, 5, 7, 9]);
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// For such a small example, a `for` loop may be cleaner, but `for_each`
|
||||||
|
/// might be preferable to keep a functional style with longer iterators:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// #![feature(iterator_for_each)]
|
||||||
|
///
|
||||||
|
/// (0..5).flat_map(|x| x * 100 .. x * 110)
|
||||||
|
/// .enumerate()
|
||||||
|
/// .filter(|&(i, x)| (i + x) % 3 == 0)
|
||||||
|
/// .for_each(|(i, x)| println!("{}:{}", i, x));
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
#[unstable(feature = "iterator_for_each", issue = "42986")]
|
||||||
|
fn for_each<F>(self, mut f: F) where
|
||||||
|
Self: Sized, F: FnMut(Self::Item),
|
||||||
|
{
|
||||||
|
self.fold((), move |(), item| f(item));
|
||||||
|
}
|
||||||
|
|
||||||
/// Creates an iterator which uses a closure to determine if an element
|
/// Creates an iterator which uses a closure to determine if an element
|
||||||
/// should be yielded.
|
/// should be yielded.
|
||||||
///
|
///
|
||||||
|
|
|
@ -314,12 +314,6 @@ pub use self::iterator::Iterator;
|
||||||
reason = "likely to be replaced by finer-grained traits",
|
reason = "likely to be replaced by finer-grained traits",
|
||||||
issue = "42168")]
|
issue = "42168")]
|
||||||
pub use self::range::Step;
|
pub use self::range::Step;
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[rustc_deprecated(since = "1.19.0",
|
|
||||||
reason = "replaced by `iter::StepBy`")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
pub use self::range::StepBy as DeprecatedStepBy;
|
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub use self::sources::{Repeat, repeat};
|
pub use self::sources::{Repeat, repeat};
|
||||||
|
|
|
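The deprecated re-export removed above is superseded by `Iterator::step_by` (unstable as `iterator_step_by` when this commit landed, stable in later releases). Equivalent usage:

```rust
fn main() {
    // Replacement for the removed ops::Range::step_by: step any iterator.
    let evens: Vec<u32> = (0..10).step_by(2).collect();
    assert_eq!(evens, [0, 2, 4, 6, 8]);

    // Works on RangeFrom too; take() bounds the otherwise infinite iterator.
    let multiples: Vec<u32> = (0..).step_by(3).take(5).collect();
    assert_eq!(multiples, [0, 3, 6, 9, 12]);
}
```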
@ -244,219 +244,6 @@ step_impl_signed!(i64);
|
||||||
step_impl_no_between!(u64 i64);
|
step_impl_no_between!(u64 i64);
|
||||||
step_impl_no_between!(u128 i128);
|
step_impl_no_between!(u128 i128);
|
||||||
|
|
||||||
/// An adapter for stepping range iterators by a custom amount.
|
|
||||||
///
|
|
||||||
/// The resulting iterator handles overflow by stopping. The `A`
|
|
||||||
/// parameter is the type being iterated over, while `R` is the range
|
|
||||||
/// type (usually one of `std::ops::{Range, RangeFrom, RangeInclusive}`.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[rustc_deprecated(since = "1.19.0",
|
|
||||||
reason = "replaced by `iter::StepBy`")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
pub struct StepBy<A, R> {
|
|
||||||
step_by: A,
|
|
||||||
range: R,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<A: Step> ops::RangeFrom<A> {
|
|
||||||
/// Creates an iterator starting at the same point, but stepping by
|
|
||||||
/// the given amount at each iteration.
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// #![feature(step_by)]
|
|
||||||
/// fn main() {
|
|
||||||
/// let result: Vec<_> = (0..).step_by(2).take(5).collect();
|
|
||||||
/// assert_eq!(result, vec![0, 2, 4, 6, 8]);
|
|
||||||
/// }
|
|
||||||
/// ```
|
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[rustc_deprecated(since = "1.19.0",
|
|
||||||
reason = "replaced by `Iterator::step_by`")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
pub fn step_by(self, by: A) -> StepBy<A, Self> {
|
|
||||||
StepBy {
|
|
||||||
step_by: by,
|
|
||||||
range: self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<A: Step> ops::Range<A> {
|
|
||||||
/// Creates an iterator with the same range, but stepping by the
|
|
||||||
/// given amount at each iteration.
|
|
||||||
///
|
|
||||||
/// The resulting iterator handles overflow by stopping.
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// #![feature(step_by)]
|
|
||||||
/// fn main() {
|
|
||||||
/// let result: Vec<_> = (0..10).step_by(2).collect();
|
|
||||||
/// assert_eq!(result, vec![0, 2, 4, 6, 8]);
|
|
||||||
/// }
|
|
||||||
/// ```
|
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[rustc_deprecated(since = "1.19.0",
|
|
||||||
reason = "replaced by `Iterator::step_by`")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
pub fn step_by(self, by: A) -> StepBy<A, Self> {
|
|
||||||
StepBy {
|
|
||||||
step_by: by,
|
|
||||||
range: self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<A: Step> ops::RangeInclusive<A> {
|
|
||||||
/// Creates an iterator with the same range, but stepping by the
|
|
||||||
/// given amount at each iteration.
|
|
||||||
///
|
|
||||||
/// The resulting iterator handles overflow by stopping.
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// #![feature(step_by, inclusive_range_syntax)]
|
|
||||||
///
|
|
||||||
/// let result: Vec<_> = (0...10).step_by(2).collect();
|
|
||||||
/// assert_eq!(result, vec![0, 2, 4, 6, 8, 10]);
|
|
||||||
/// ```
|
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[rustc_deprecated(since = "1.19.0",
|
|
||||||
reason = "replaced by `Iterator::step_by`")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
pub fn step_by(self, by: A) -> StepBy<A, Self> {
|
|
||||||
StepBy {
|
|
||||||
step_by: by,
|
|
||||||
range: self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl<A> Iterator for StepBy<A, ops::RangeFrom<A>> where
|
|
||||||
A: Clone,
|
|
||||||
for<'a> &'a A: Add<&'a A, Output = A>
|
|
||||||
{
|
|
||||||
type Item = A;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn next(&mut self) -> Option<A> {
|
|
||||||
let mut n = &self.range.start + &self.step_by;
|
|
||||||
mem::swap(&mut n, &mut self.range.start);
|
|
||||||
Some(n)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
|
||||||
(usize::MAX, None) // Too bad we can't specify an infinite lower bound
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl<A> FusedIterator for StepBy<A, ops::RangeFrom<A>>
|
|
||||||
where A: Clone, for<'a> &'a A: Add<&'a A, Output = A> {}
|
|
||||||
|
|
||||||
#[unstable(feature = "step_by", reason = "recent addition",
|
|
||||||
issue = "27741")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl<A: Step + Clone> Iterator for StepBy<A, ops::Range<A>> {
|
|
||||||
type Item = A;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn next(&mut self) -> Option<A> {
|
|
||||||
let rev = self.step_by.is_negative();
|
|
||||||
if (rev && self.range.start > self.range.end) ||
|
|
||||||
(!rev && self.range.start < self.range.end)
|
|
||||||
{
|
|
||||||
match self.range.start.step(&self.step_by) {
|
|
||||||
Some(mut n) => {
|
|
||||||
mem::swap(&mut self.range.start, &mut n);
|
|
||||||
Some(n)
|
|
||||||
},
|
|
||||||
None => {
|
|
||||||
let mut n = self.range.end.clone();
|
|
||||||
mem::swap(&mut self.range.start, &mut n);
|
|
||||||
Some(n)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
|
||||||
match Step::steps_between(&self.range.start,
|
|
||||||
&self.range.end,
|
|
||||||
&self.step_by) {
|
|
||||||
Some(hint) => (hint, Some(hint)),
|
|
||||||
None => (0, None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl<A: Step + Clone> FusedIterator for StepBy<A, ops::Range<A>> {}
|
|
||||||
|
|
||||||
#[unstable(feature = "inclusive_range",
|
|
||||||
reason = "recently added, follows RFC",
|
|
||||||
issue = "28237")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl<A: Step + Clone> Iterator for StepBy<A, ops::RangeInclusive<A>> {
|
|
||||||
type Item = A;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn next(&mut self) -> Option<A> {
|
|
||||||
let rev = self.step_by.is_negative();
|
|
||||||
|
|
||||||
if (rev && self.range.start >= self.range.end) ||
|
|
||||||
(!rev && self.range.start <= self.range.end)
|
|
||||||
{
|
|
||||||
match self.range.start.step(&self.step_by) {
|
|
||||||
Some(n) => {
|
|
||||||
Some(mem::replace(&mut self.range.start, n))
|
|
||||||
},
|
|
||||||
None => {
|
|
||||||
let last = self.range.start.replace_one();
|
|
||||||
self.range.end.replace_zero();
|
|
||||||
self.step_by.replace_one();
|
|
||||||
Some(last)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
|
||||||
match Step::steps_between(&self.range.start,
|
|
||||||
&self.range.end,
|
|
||||||
&self.step_by) {
|
|
||||||
Some(hint) => (hint.saturating_add(1), hint.checked_add(1)),
|
|
||||||
None => (0, None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl<A: Step + Clone> FusedIterator for StepBy<A, ops::RangeInclusive<A>> {}
|
|
||||||
|
|
||||||
macro_rules! range_exact_iter_impl {
|
macro_rules! range_exact_iter_impl {
|
||||||
($($t:ty)*) => ($(
|
($($t:ty)*) => ($(
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
|
|
|
@ -36,12 +36,12 @@
//! These functions are often provided by the system libc, but can also be
//! provided by the [rlibc crate](https://crates.io/crates/rlibc).
//!
-//! * `rust_begin_panic` - This function takes three arguments, a
+//! * `rust_begin_panic` - This function takes four arguments, a
-//!   `fmt::Arguments`, a `&'static str`, and a `u32`. These three arguments
+//!   `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments
//!   dictate the panic message, the file at which panic was invoked, and the
-//!   line. It is up to consumers of this core library to define this panic
+//!   line and column inside the file. It is up to consumers of this core
-//!   function; it is only required to never return. This requires a `lang`
+//!   library to define this panic function; it is only required to never
-//!   attribute named `panic_fmt`.
+//!   return. This requires a `lang` attribute named `panic_fmt`.
//!
//! * `rust_eh_personality` - is used by the failure mechanisms of the
//!   compiler. This is often mapped to GCC's personality function, but crates
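For context, this is roughly what the consumer-side `panic_fmt` lang item described above looks like once the column is threaded through. It is a sketch only, assuming a `#![no_std]` nightly crate of this era with `lang_items` enabled; the function name `rust_begin_panic` and the logging strategy are illustrative, not mandated by this change:

```rust
#![feature(lang_items)]
#![no_std]

use core::fmt;

// The panic entry point now receives file, line *and* column.
#[lang = "panic_fmt"]
#[no_mangle]
pub extern fn rust_begin_panic(msg: fmt::Arguments,
                               file: &'static str,
                               line: u32,
                               col: u32) -> ! {
    // A real implementation would record `msg`, `file`, `line` and `col`
    // somewhere useful (serial port, host console, ...) before halting.
    let _ = (msg, file, line, col);
    loop {}
}
```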
@ -17,16 +17,18 @@ macro_rules! panic {
        panic!("explicit panic")
    );
    ($msg:expr) => ({
-        static _MSG_FILE_LINE: (&'static str, &'static str, u32) = ($msg, file!(), line!());
-        $crate::panicking::panic(&_MSG_FILE_LINE)
+        static _MSG_FILE_LINE_COL: (&'static str, &'static str, u32, u32) =
+            ($msg, file!(), line!(), column!());
+        $crate::panicking::panic(&_MSG_FILE_LINE_COL)
    });
    ($fmt:expr, $($arg:tt)*) => ({
        // The leading _'s are to avoid dead code warnings if this is
        // used inside a dead function. Just `#[allow(dead_code)]` is
        // insufficient, since the user may have
        // `#[forbid(dead_code)]` and which cannot be overridden.
-        static _FILE_LINE: (&'static str, u32) = (file!(), line!());
-        $crate::panicking::panic_fmt(format_args!($fmt, $($arg)*), &_FILE_LINE)
+        static _MSG_FILE_LINE_COL: (&'static str, u32, u32) =
+            (file!(), line!(), column!());
+        $crate::panicking::panic_fmt(format_args!($fmt, $($arg)*), &_MSG_FILE_LINE_COL)
    });
}
@ -506,59 +506,7 @@ pub unsafe fn uninitialized<T>() -> T {
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub fn swap<T>(x: &mut T, y: &mut T) {
|
pub fn swap<T>(x: &mut T, y: &mut T) {
|
||||||
unsafe {
|
unsafe {
|
||||||
// The approach here is to utilize simd to swap x & y efficiently. Testing reveals
|
ptr::swap_nonoverlapping(x, y, 1);
|
||||||
// that swapping either 32 bytes or 64 bytes at a time is most efficient for intel
|
|
||||||
// Haswell E processors. LLVM is more able to optimize if we give a struct a
|
|
||||||
// #[repr(simd)], even if we don't actually use this struct directly.
|
|
||||||
//
|
|
||||||
// FIXME repr(simd) broken on emscripten and redox
|
|
||||||
#[cfg_attr(not(any(target_os = "emscripten", target_os = "redox")), repr(simd))]
|
|
||||||
struct Block(u64, u64, u64, u64);
|
|
||||||
struct UnalignedBlock(u64, u64, u64, u64);
|
|
||||||
|
|
||||||
let block_size = size_of::<Block>();
|
|
||||||
|
|
||||||
// Get raw pointers to the bytes of x & y for easier manipulation
|
|
||||||
let x = x as *mut T as *mut u8;
|
|
||||||
let y = y as *mut T as *mut u8;
|
|
||||||
|
|
||||||
// Loop through x & y, copying them `Block` at a time
|
|
||||||
// The optimizer should unroll the loop fully for most types
|
|
||||||
// N.B. We can't use a for loop as the `range` impl calls `mem::swap` recursively
|
|
||||||
let len = size_of::<T>();
|
|
||||||
let mut i = 0;
|
|
||||||
while i + block_size <= len {
|
|
||||||
// Create some uninitialized memory as scratch space
|
|
||||||
// Declaring `t` here avoids aligning the stack when this loop is unused
|
|
||||||
let mut t: Block = uninitialized();
|
|
||||||
let t = &mut t as *mut _ as *mut u8;
|
|
||||||
let x = x.offset(i as isize);
|
|
||||||
let y = y.offset(i as isize);
|
|
||||||
|
|
||||||
// Swap a block of bytes of x & y, using t as a temporary buffer
|
|
||||||
// This should be optimized into efficient SIMD operations where available
|
|
||||||
ptr::copy_nonoverlapping(x, t, block_size);
|
|
||||||
ptr::copy_nonoverlapping(y, x, block_size);
|
|
||||||
ptr::copy_nonoverlapping(t, y, block_size);
|
|
||||||
i += block_size;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
if i < len {
|
|
||||||
// Swap any remaining bytes, using aligned types to copy
|
|
||||||
// where appropriate (this information is lost by conversion
|
|
||||||
// to *mut u8, so restore it manually here)
|
|
||||||
let mut t: UnalignedBlock = uninitialized();
|
|
||||||
let rem = len - i;
|
|
||||||
|
|
||||||
let t = &mut t as *mut _ as *mut u8;
|
|
||||||
let x = x.offset(i as isize);
|
|
||||||
let y = y.offset(i as isize);
|
|
||||||
|
|
||||||
ptr::copy_nonoverlapping(x, t, rem);
|
|
||||||
ptr::copy_nonoverlapping(y, x, rem);
|
|
||||||
ptr::copy_nonoverlapping(t, y, rem);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -205,18 +205,25 @@ impl Float for f32 {
        }
    }

-    /// Returns `true` if `self` is positive, including `+0.0` and
-    /// `Float::infinity()`.
+    /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaN`s with
+    /// positive sign bit and positive infinity.
    #[inline]
    fn is_sign_positive(self) -> bool {
-        self > 0.0 || (1.0 / self) == INFINITY
+        !self.is_sign_negative()
    }

-    /// Returns `true` if `self` is negative, including `-0.0` and
-    /// `Float::neg_infinity()`.
+    /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaN`s with
+    /// negative sign bit and negative infinity.
    #[inline]
    fn is_sign_negative(self) -> bool {
-        self < 0.0 || (1.0 / self) == NEG_INFINITY
+        // IEEE754 says: isSignMinus(x) is true if and only if x has negative sign. isSignMinus
+        // applies to zeros and NaNs as well.
+        #[repr(C)]
+        union F32Bytes {
+            f: f32,
+            b: u32
+        }
+        unsafe { F32Bytes { f: self }.b & 0x8000_0000 != 0 }
    }

    /// Returns the reciprocal (multiplicative inverse) of the number.
@ -205,18 +205,23 @@ impl Float for f64 {
        }
    }

-    /// Returns `true` if `self` is positive, including `+0.0` and
-    /// `Float::infinity()`.
+    /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaN`s with
+    /// positive sign bit and positive infinity.
    #[inline]
    fn is_sign_positive(self) -> bool {
-        self > 0.0 || (1.0 / self) == INFINITY
+        !self.is_sign_negative()
    }

-    /// Returns `true` if `self` is negative, including `-0.0` and
-    /// `Float::neg_infinity()`.
+    /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaN`s with
+    /// negative sign bit and negative infinity.
    #[inline]
    fn is_sign_negative(self) -> bool {
-        self < 0.0 || (1.0 / self) == NEG_INFINITY
+        #[repr(C)]
+        union F64Bytes {
+            f: f64,
+            b: u64
+        }
+        unsafe { F64Bytes { f: self }.b & 0x8000_0000_0000_0000 != 0 }
    }

    /// Returns the reciprocal (multiplicative inverse) of the number.
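The bit-level definition above only changes behavior for zeros and NaNs; a quick stand-alone check of those cases, written against today's stable `f32`/`f64` methods rather than the internal `Float` trait touched by this diff:

```rust
fn main() {
    // Negative zero has a negative sign even though it is not `< 0.0`,
    // which the old `1.0 / x == NEG_INFINITY` trick only caught indirectly.
    assert!((-0.0f32).is_sign_negative());
    assert!((0.0f64).is_sign_positive());
    assert!(f32::NEG_INFINITY.is_sign_negative());

    // A NaN with the sign bit set is reported as negative; the old
    // comparison-based implementation could not classify NaNs at all.
    let neg_nan = f64::from_bits(0x8000_0000_0000_0000 | 0x7ff8_0000_0000_0000);
    assert!(neg_nan.is_nan() && neg_nan.is_sign_negative());
}
```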
@ -17,7 +17,7 @@
//!
//! ```
//! # use std::fmt;
-//! fn panic_impl(fmt: fmt::Arguments, file_line: &(&'static str, u32)) -> !
+//! fn panic_impl(fmt: fmt::Arguments, file_line_col: &(&'static str, u32, u32)) -> !
//! # { loop {} }
//! ```
//!
@ -39,34 +39,55 @@
use fmt;

#[cold] #[inline(never)] // this is the slow path, always
-#[lang = "panic"]
+#[cfg_attr(not(stage0), lang = "panic")]
-pub fn panic(expr_file_line: &(&'static str, &'static str, u32)) -> ! {
+pub fn panic(expr_file_line_col: &(&'static str, &'static str, u32, u32)) -> ! {
    // Use Arguments::new_v1 instead of format_args!("{}", expr) to potentially
    // reduce size overhead. The format_args! macro uses str's Display trait to
    // write expr, which calls Formatter::pad, which must accommodate string
    // truncation and padding (even though none is used here). Using
    // Arguments::new_v1 may allow the compiler to omit Formatter::pad from the
    // output binary, saving up to a few kilobytes.
+    let (expr, file, line, col) = *expr_file_line_col;
+    panic_fmt(fmt::Arguments::new_v1(&[expr], &[]), &(file, line, col))
+}
+
+// FIXME: remove when SNAP
+#[cold] #[inline(never)]
+#[cfg(stage0)]
+#[lang = "panic"]
+pub fn panic_old(expr_file_line: &(&'static str, &'static str, u32)) -> ! {
    let (expr, file, line) = *expr_file_line;
-    panic_fmt(fmt::Arguments::new_v1(&[expr], &[]), &(file, line))
+    let expr_file_line_col = (expr, file, line, 0);
+    panic(&expr_file_line_col)
}

#[cold] #[inline(never)]
-#[lang = "panic_bounds_check"]
+#[cfg_attr(not(stage0), lang = "panic_bounds_check")]
-fn panic_bounds_check(file_line: &(&'static str, u32),
+fn panic_bounds_check(file_line_col: &(&'static str, u32, u32),
                      index: usize, len: usize) -> ! {
    panic_fmt(format_args!("index out of bounds: the len is {} but the index is {}",
-                           len, index), file_line)
+                           len, index), file_line_col)
+}
+
+// FIXME: remove when SNAP
+#[cold] #[inline(never)]
+#[cfg(stage0)]
+#[lang = "panic_bounds_check"]
+fn panic_bounds_check_old(file_line: &(&'static str, u32),
+                          index: usize, len: usize) -> ! {
+    let (file, line) = *file_line;
+    panic_fmt(format_args!("index out of bounds: the len is {} but the index is {}",
+                           len, index), &(file, line, 0))
}

#[cold] #[inline(never)]
-pub fn panic_fmt(fmt: fmt::Arguments, file_line: &(&'static str, u32)) -> ! {
+pub fn panic_fmt(fmt: fmt::Arguments, file_line_col: &(&'static str, u32, u32)) -> ! {
    #[allow(improper_ctypes)]
    extern {
        #[lang = "panic_fmt"]
        #[unwind]
-        fn panic_impl(fmt: fmt::Arguments, file: &'static str, line: u32) -> !;
+        fn panic_impl(fmt: fmt::Arguments, file: &'static str, line: u32, col: u32) -> !;
    }
-    let (file, line) = *file_line;
+    let (file, line, col) = *file_line_col;
-    unsafe { panic_impl(fmt, file, line) }
+    unsafe { panic_impl(fmt, file, line, col) }
}
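The `file!()`/`line!()`/`column!()` trio that the updated machinery stores can be exercised directly; a small stand-alone check on plain stable Rust, unrelated to the internal statics above:

```rust
fn main() {
    // The panic plumbing above packs exactly this kind of tuple.
    let loc: (&'static str, u32, u32) = (file!(), line!(), column!());
    println!("would panic at {}:{}:{}", loc.0, loc.1, loc.2);
    assert!(loc.1 > 0 && loc.2 > 0);
}
```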
@ -117,6 +117,90 @@ pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
|
||||||
mem::forget(tmp);
|
mem::forget(tmp);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Swaps a sequence of values at two mutable locations of the same type.
|
||||||
|
///
|
||||||
|
/// # Safety
|
||||||
|
///
|
||||||
|
/// The two arguments must each point to the beginning of `count` locations
|
||||||
|
/// of valid memory, and the two memory ranges must not overlap.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Basic usage:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// #![feature(swap_nonoverlapping)]
|
||||||
|
///
|
||||||
|
/// use std::ptr;
|
||||||
|
///
|
||||||
|
/// let mut x = [1, 2, 3, 4];
|
||||||
|
/// let mut y = [7, 8, 9];
|
||||||
|
///
|
||||||
|
/// unsafe {
|
||||||
|
/// ptr::swap_nonoverlapping(x.as_mut_ptr(), y.as_mut_ptr(), 2);
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// assert_eq!(x, [7, 8, 3, 4]);
|
||||||
|
/// assert_eq!(y, [1, 2, 9]);
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
#[unstable(feature = "swap_nonoverlapping", issue = "42818")]
|
||||||
|
pub unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
|
||||||
|
let x = x as *mut u8;
|
||||||
|
let y = y as *mut u8;
|
||||||
|
let len = mem::size_of::<T>() * count;
|
||||||
|
swap_nonoverlapping_bytes(x, y, len)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) {
|
||||||
|
// The approach here is to utilize simd to swap x & y efficiently. Testing reveals
|
||||||
|
// that swapping either 32 bytes or 64 bytes at a time is most efficient for intel
|
||||||
|
// Haswell E processors. LLVM is more able to optimize if we give a struct a
|
||||||
|
// #[repr(simd)], even if we don't actually use this struct directly.
|
||||||
|
//
|
||||||
|
// FIXME repr(simd) broken on emscripten and redox
|
||||||
|
#[cfg_attr(not(any(target_os = "emscripten", target_os = "redox")), repr(simd))]
|
||||||
|
struct Block(u64, u64, u64, u64);
|
||||||
|
struct UnalignedBlock(u64, u64, u64, u64);
|
||||||
|
|
||||||
|
let block_size = mem::size_of::<Block>();
|
||||||
|
|
||||||
|
// Loop through x & y, copying them `Block` at a time
|
||||||
|
// The optimizer should unroll the loop fully for most types
|
||||||
|
// N.B. We can't use a for loop as the `range` impl calls `mem::swap` recursively
|
||||||
|
let mut i = 0;
|
||||||
|
while i + block_size <= len {
|
||||||
|
// Create some uninitialized memory as scratch space
|
||||||
|
// Declaring `t` here avoids aligning the stack when this loop is unused
|
||||||
|
let mut t: Block = mem::uninitialized();
|
||||||
|
let t = &mut t as *mut _ as *mut u8;
|
||||||
|
let x = x.offset(i as isize);
|
||||||
|
let y = y.offset(i as isize);
|
||||||
|
|
||||||
|
// Swap a block of bytes of x & y, using t as a temporary buffer
|
||||||
|
// This should be optimized into efficient SIMD operations where available
|
||||||
|
copy_nonoverlapping(x, t, block_size);
|
||||||
|
copy_nonoverlapping(y, x, block_size);
|
||||||
|
copy_nonoverlapping(t, y, block_size);
|
||||||
|
i += block_size;
|
||||||
|
}
|
||||||
|
|
||||||
|
if i < len {
|
||||||
|
// Swap any remaining bytes
|
||||||
|
let mut t: UnalignedBlock = mem::uninitialized();
|
||||||
|
let rem = len - i;
|
||||||
|
|
||||||
|
let t = &mut t as *mut _ as *mut u8;
|
||||||
|
let x = x.offset(i as isize);
|
||||||
|
let y = y.offset(i as isize);
|
||||||
|
|
||||||
|
copy_nonoverlapping(x, t, rem);
|
||||||
|
copy_nonoverlapping(y, x, rem);
|
||||||
|
copy_nonoverlapping(t, y, rem);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Replaces the value at `dest` with `src`, returning the old
|
/// Replaces the value at `dest` with `src`, returning the old
|
||||||
/// value, without dropping either.
|
/// value, without dropping either.
|
||||||
///
|
///
|
||||||
|
|
|
@ -212,15 +212,15 @@ pub trait SliceExt {
    #[stable(feature = "copy_from_slice", since = "1.9.0")]
    fn copy_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Copy;

-    #[unstable(feature = "sort_unstable", issue = "40585")]
+    #[stable(feature = "sort_unstable", since = "1.20.0")]
    fn sort_unstable(&mut self)
        where Self::Item: Ord;

-    #[unstable(feature = "sort_unstable", issue = "40585")]
+    #[stable(feature = "sort_unstable", since = "1.20.0")]
    fn sort_unstable_by<F>(&mut self, compare: F)
        where F: FnMut(&Self::Item, &Self::Item) -> Ordering;

-    #[unstable(feature = "sort_unstable", issue = "40585")]
+    #[stable(feature = "sort_unstable", since = "1.20.0")]
    fn sort_unstable_by_key<B, F>(&mut self, f: F)
        where F: FnMut(&Self::Item) -> B,
              B: Ord;
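With the `sort_unstable` family stabilized for 1.20 as shown above, it can be called on any slice without a feature gate; a small usage sketch on plain stable Rust:

```rust
fn main() {
    let mut v = [-5i32, 4, 1, -3, 2];

    // Unstable sort: typically faster than `sort`, but elements that
    // compare equal may be reordered relative to each other.
    v.sort_unstable();
    assert_eq!(v, [-5, -3, 1, 2, 4]);

    // Sort by a derived key, here the absolute value.
    v.sort_unstable_by_key(|x| x.abs());
    assert_eq!(v, [1, 2, -3, 4, -5]);
}
```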
@ -76,7 +76,7 @@ pub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {
            break;
        }

-        ptr_swap_n(
+        ptr::swap_nonoverlapping(
            mid.offset(-(left as isize)),
            mid.offset((right-delta) as isize),
            delta);
@ -103,10 +103,3 @@ pub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {
        ptr::copy_nonoverlapping(buf, mid.offset(-(left as isize)), right);
    }
}
-
-unsafe fn ptr_swap_n<T>(a: *mut T, b: *mut T, n: usize) {
-    for i in 0..n {
-        // These are nonoverlapping, so use mem::swap instead of ptr::swap
-        mem::swap(&mut *a.offset(i as isize), &mut *b.offset(i as isize));
-    }
-}
@ -351,7 +351,7 @@ fn partition_in_blocks<T, F>(v: &mut [T], pivot: &T, is_less: &mut F) -> usize

        if start_l < end_l {
            // The left block remains.
-            // Move it's remaining out-of-order elements to the far right.
+            // Move its remaining out-of-order elements to the far right.
            debug_assert_eq!(width(l, r), block_l);
            while start_l < end_l {
                unsafe {
@ -363,7 +363,7 @@ fn partition_in_blocks<T, F>(v: &mut [T], pivot: &T, is_less: &mut F) -> usize
            width(v.as_mut_ptr(), r)
        } else if start_r < end_r {
            // The right block remains.
-            // Move it's remaining out-of-order elements to the far left.
+            // Move its remaining out-of-order elements to the far left.
            debug_assert_eq!(width(l, r), block_r);
            while start_r < end_r {
                unsafe {
@ -12,15 +12,6 @@ use core::iter::*;
|
||||||
use core::{i8, i16, isize};
|
use core::{i8, i16, isize};
|
||||||
use core::usize;
|
use core::usize;
|
||||||
|
|
||||||
// FIXME #27741: This is here to simplify calling Iterator::step_by. Remove
|
|
||||||
// once Range::step_by is completely gone (not just deprecated).
|
|
||||||
trait IterEx: Sized {
|
|
||||||
fn iter_step_by(self, n: usize) -> StepBy<Self>;
|
|
||||||
}
|
|
||||||
impl<I:Iterator> IterEx for I {
|
|
||||||
fn iter_step_by(self, n: usize) -> StepBy<Self> { self.step_by(n) }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_lt() {
|
fn test_lt() {
|
||||||
let empty: [isize; 0] = [];
|
let empty: [isize; 0] = [];
|
||||||
|
@ -76,7 +67,7 @@ fn test_multi_iter() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_counter_from_iter() {
|
fn test_counter_from_iter() {
|
||||||
let it = (0..).iter_step_by(5).take(10);
|
let it = (0..).step_by(5).take(10);
|
||||||
let xs: Vec<isize> = FromIterator::from_iter(it);
|
let xs: Vec<isize> = FromIterator::from_iter(it);
|
||||||
assert_eq!(xs, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45]);
|
assert_eq!(xs, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45]);
|
||||||
}
|
}
|
||||||
|
@ -94,7 +85,7 @@ fn test_iterator_chain() {
|
||||||
}
|
}
|
||||||
assert_eq!(i, expected.len());
|
assert_eq!(i, expected.len());
|
||||||
|
|
||||||
let ys = (30..).iter_step_by(10).take(4);
|
let ys = (30..).step_by(10).take(4);
|
||||||
let it = xs.iter().cloned().chain(ys);
|
let it = xs.iter().cloned().chain(ys);
|
||||||
let mut i = 0;
|
let mut i = 0;
|
||||||
for x in it {
|
for x in it {
|
||||||
|
@ -156,13 +147,13 @@ fn test_iterator_chain_find() {
|
||||||
#[test]
|
#[test]
|
||||||
fn test_iterator_step_by() {
|
fn test_iterator_step_by() {
|
||||||
// Identity
|
// Identity
|
||||||
let mut it = (0..).iter_step_by(1).take(3);
|
let mut it = (0..).step_by(1).take(3);
|
||||||
assert_eq!(it.next(), Some(0));
|
assert_eq!(it.next(), Some(0));
|
||||||
assert_eq!(it.next(), Some(1));
|
assert_eq!(it.next(), Some(1));
|
||||||
assert_eq!(it.next(), Some(2));
|
assert_eq!(it.next(), Some(2));
|
||||||
assert_eq!(it.next(), None);
|
assert_eq!(it.next(), None);
|
||||||
|
|
||||||
let mut it = (0..).iter_step_by(3).take(4);
|
let mut it = (0..).step_by(3).take(4);
|
||||||
assert_eq!(it.next(), Some(0));
|
assert_eq!(it.next(), Some(0));
|
||||||
assert_eq!(it.next(), Some(3));
|
assert_eq!(it.next(), Some(3));
|
||||||
assert_eq!(it.next(), Some(6));
|
assert_eq!(it.next(), Some(6));
|
||||||
|
@ -173,7 +164,7 @@ fn test_iterator_step_by() {
|
||||||
#[test]
|
#[test]
|
||||||
#[should_panic]
|
#[should_panic]
|
||||||
fn test_iterator_step_by_zero() {
|
fn test_iterator_step_by_zero() {
|
||||||
let mut it = (0..).iter_step_by(0);
|
let mut it = (0..).step_by(0);
|
||||||
it.next();
|
it.next();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -252,7 +243,7 @@ fn test_iterator_step_by_size_hint() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_filter_map() {
|
fn test_filter_map() {
|
||||||
let it = (0..).iter_step_by(1).take(10)
|
let it = (0..).step_by(1).take(10)
|
||||||
.filter_map(|x| if x % 2 == 0 { Some(x*x) } else { None });
|
.filter_map(|x| if x % 2 == 0 { Some(x*x) } else { None });
|
||||||
assert_eq!(it.collect::<Vec<usize>>(), [0*0, 2*2, 4*4, 6*6, 8*8]);
|
assert_eq!(it.collect::<Vec<usize>>(), [0*0, 2*2, 4*4, 6*6, 8*8]);
|
||||||
}
|
}
|
||||||
|
@ -654,7 +645,7 @@ fn test_iterator_scan() {
|
||||||
fn test_iterator_flat_map() {
|
fn test_iterator_flat_map() {
|
||||||
let xs = [0, 3, 6];
|
let xs = [0, 3, 6];
|
||||||
let ys = [0, 1, 2, 3, 4, 5, 6, 7, 8];
|
let ys = [0, 1, 2, 3, 4, 5, 6, 7, 8];
|
||||||
let it = xs.iter().flat_map(|&x| (x..).iter_step_by(1).take(3));
|
let it = xs.iter().flat_map(|&x| (x..).step_by(1).take(3));
|
||||||
let mut i = 0;
|
let mut i = 0;
|
||||||
for x in it {
|
for x in it {
|
||||||
assert_eq!(x, ys[i]);
|
assert_eq!(x, ys[i]);
|
||||||
|
@ -680,13 +671,13 @@ fn test_inspect() {
|
||||||
#[test]
|
#[test]
|
||||||
fn test_cycle() {
|
fn test_cycle() {
|
||||||
let cycle_len = 3;
|
let cycle_len = 3;
|
||||||
let it = (0..).iter_step_by(1).take(cycle_len).cycle();
|
let it = (0..).step_by(1).take(cycle_len).cycle();
|
||||||
assert_eq!(it.size_hint(), (usize::MAX, None));
|
assert_eq!(it.size_hint(), (usize::MAX, None));
|
||||||
for (i, x) in it.take(100).enumerate() {
|
for (i, x) in it.take(100).enumerate() {
|
||||||
assert_eq!(i % cycle_len, x);
|
assert_eq!(i % cycle_len, x);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut it = (0..).iter_step_by(1).take(0).cycle();
|
let mut it = (0..).step_by(1).take(0).cycle();
|
||||||
assert_eq!(it.size_hint(), (0, Some(0)));
|
assert_eq!(it.size_hint(), (0, Some(0)));
|
||||||
assert_eq!(it.next(), None);
|
assert_eq!(it.next(), None);
|
||||||
}
|
}
|
||||||
|
@ -765,7 +756,7 @@ fn test_iterator_min() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_iterator_size_hint() {
|
fn test_iterator_size_hint() {
|
||||||
let c = (0..).iter_step_by(1);
|
let c = (0..).step_by(1);
|
||||||
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||||
let v2 = &[10, 11, 12];
|
let v2 = &[10, 11, 12];
|
||||||
let vi = v.iter();
|
let vi = v.iter();
|
||||||
|
@ -1090,8 +1081,8 @@ fn test_range_step() {
|
||||||
#![allow(deprecated)]
|
#![allow(deprecated)]
|
||||||
|
|
||||||
assert_eq!((0..20).step_by(5).collect::<Vec<isize>>(), [0, 5, 10, 15]);
|
assert_eq!((0..20).step_by(5).collect::<Vec<isize>>(), [0, 5, 10, 15]);
|
||||||
assert_eq!((20..0).step_by(-5).collect::<Vec<isize>>(), [20, 15, 10, 5]);
|
assert_eq!((1..21).rev().step_by(5).collect::<Vec<isize>>(), [20, 15, 10, 5]);
|
||||||
assert_eq!((20..0).step_by(-6).collect::<Vec<isize>>(), [20, 14, 8, 2]);
|
assert_eq!((1..21).rev().step_by(6).collect::<Vec<isize>>(), [20, 14, 8, 2]);
|
||||||
assert_eq!((200..255).step_by(50).collect::<Vec<u8>>(), [200, 250]);
|
assert_eq!((200..255).step_by(50).collect::<Vec<u8>>(), [200, 250]);
|
||||||
assert_eq!((200..-5).step_by(1).collect::<Vec<isize>>(), []);
|
assert_eq!((200..-5).step_by(1).collect::<Vec<isize>>(), []);
|
||||||
assert_eq!((200..200).step_by(1).collect::<Vec<isize>>(), []);
|
assert_eq!((200..200).step_by(1).collect::<Vec<isize>>(), []);
|
||||||
|
@ -1099,13 +1090,12 @@ fn test_range_step() {
|
||||||
assert_eq!((0..20).step_by(1).size_hint(), (20, Some(20)));
|
assert_eq!((0..20).step_by(1).size_hint(), (20, Some(20)));
|
||||||
assert_eq!((0..20).step_by(21).size_hint(), (1, Some(1)));
|
assert_eq!((0..20).step_by(21).size_hint(), (1, Some(1)));
|
||||||
assert_eq!((0..20).step_by(5).size_hint(), (4, Some(4)));
|
assert_eq!((0..20).step_by(5).size_hint(), (4, Some(4)));
|
||||||
assert_eq!((20..0).step_by(-5).size_hint(), (4, Some(4)));
|
assert_eq!((1..21).rev().step_by(5).size_hint(), (4, Some(4)));
|
||||||
assert_eq!((20..0).step_by(-6).size_hint(), (4, Some(4)));
|
assert_eq!((1..21).rev().step_by(6).size_hint(), (4, Some(4)));
|
||||||
assert_eq!((20..-5).step_by(1).size_hint(), (0, Some(0)));
|
assert_eq!((20..-5).step_by(1).size_hint(), (0, Some(0)));
|
||||||
assert_eq!((20..20).step_by(1).size_hint(), (0, Some(0)));
|
assert_eq!((20..20).step_by(1).size_hint(), (0, Some(0)));
|
||||||
assert_eq!((0..1).step_by(0).size_hint(), (0, None));
|
assert_eq!((i8::MIN..i8::MAX).step_by(-(i8::MIN as i32) as usize).size_hint(), (2, Some(2)));
|
||||||
assert_eq!((i8::MAX..i8::MIN).step_by(i8::MIN).size_hint(), (2, Some(2)));
|
assert_eq!((i16::MIN..i16::MAX).step_by(i16::MAX as usize).size_hint(), (3, Some(3)));
|
||||||
assert_eq!((i16::MIN..i16::MAX).step_by(i16::MAX).size_hint(), (3, Some(3)));
|
|
||||||
assert_eq!((isize::MIN..isize::MAX).step_by(1).size_hint(), (usize::MAX, Some(usize::MAX)));
|
assert_eq!((isize::MIN..isize::MAX).step_by(1).size_hint(), (usize::MAX, Some(usize::MAX)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -34,9 +34,7 @@
#![feature(slice_patterns)]
#![feature(slice_rotate)]
#![feature(sort_internals)]
-#![feature(sort_unstable)]
#![feature(specialization)]
-#![feature(step_by)]
#![feature(step_trait)]
#![feature(test)]
#![feature(trusted_len)]
@ -31,7 +31,7 @@
            issue = "27703")]
#![feature(core_intrinsics)]
#![feature(staged_api)]
-#![feature(step_by)]
+#![feature(iterator_step_by)]
#![feature(custom_attribute)]
#![feature(specialization)]
#![allow(unused_attributes)]
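The deprecated range-specific `step_by` calls removed elsewhere in this commit are replaced by the iterator adaptor behind the `iterator_step_by` gate enabled above. On the nightly of this era the gate was required; on current Rust the adaptor is simply stable, which is what the sketch below assumes:

```rust
fn main() {
    let evens: Vec<u32> = (0..10).step_by(2).collect();
    assert_eq!(evens, [0, 2, 4, 6, 8]);

    // Unlike the old range-only method, the adaptor works on any iterator.
    let every_third: Vec<char> = "abcdefg".chars().step_by(3).collect();
    assert_eq!(every_third, ['a', 'd', 'g']);
}
```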
@ -1946,6 +1946,44 @@ Maybe you just misspelled the lint name or the lint doesn't exist anymore.
Either way, try to update/remove it in order to fix the error.
"##,

+E0621: r##"
+This error code indicates a mismatch between the function signature (i.e.,
+the parameter types and the return type) and the function body. Most of
+the time, this indicates that the function signature needs to be changed to
+match the body, but it may be that the body needs to be changed to match
+the signature.
+
+Specifically, one or more of the parameters contain borrowed data that
+needs to have a named lifetime in order for the body to type-check. Most of
+the time, this is because the borrowed data is being returned from the
+function, as in this example:
+
+```compile_fail,E0621
+fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 { // explicit lifetime required
+                                             // in the type of `y`
+    if x > y { x } else { y }
+}
+```
+
+Here, the function is returning data borrowed from either x or y, but the
+'a annotation indicates that it is returning data only from x. We can make
+the signature match the body by changing the type of y to &'a i32, like so:
+
+```
+fn foo<'a>(x: &'a i32, y: &'a i32) -> &'a i32 {
+    if x > y { x } else { y }
+}
+```
+
+Alternatively, you could change the body not to return data from y:
+
+```
+fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 {
+    x
+}
+```
+"##,
+
}

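Beyond the two fixes listed in the error text, a further variant (not part of the error-index entry, shown here only as an illustration) is to give `y` its own lifetime with an outlives bound, so callers can still pass borrows with different scopes:

```rust
fn foo<'a, 'b: 'a>(x: &'a i32, y: &'b i32) -> &'a i32 {
    // `'b: 'a` lets a `&'b i32` be returned where a `&'a i32` is expected.
    if x > y { x } else { y }
}

fn main() {
    let a = 10;
    let b = 7;
    assert_eq!(*foo(&a, &b), 10);
}
```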
@ -2170,12 +2170,12 @@ impl<'a> LoweringContext<'a> {
|
||||||
// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
|
// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
|
||||||
// mut iter => {
|
// mut iter => {
|
||||||
// [opt_ident]: loop {
|
// [opt_ident]: loop {
|
||||||
// let mut _next;
|
// let mut __next;
|
||||||
// match ::std::iter::Iterator::next(&mut iter) {
|
// match ::std::iter::Iterator::next(&mut iter) {
|
||||||
// ::std::option::Option::Some(val) => _next = val,
|
// ::std::option::Option::Some(val) => __next = val,
|
||||||
// ::std::option::Option::None => break
|
// ::std::option::Option::None => break
|
||||||
// };
|
// };
|
||||||
// let <pat> = _next;
|
// let <pat> = __next;
|
||||||
// StmtExpr(<body>);
|
// StmtExpr(<body>);
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
@ -2188,7 +2188,7 @@ impl<'a> LoweringContext<'a> {
|
||||||
|
|
||||||
let iter = self.str_to_ident("iter");
|
let iter = self.str_to_ident("iter");
|
||||||
|
|
||||||
let next_ident = self.str_to_ident("_next");
|
let next_ident = self.str_to_ident("__next");
|
||||||
let next_pat = self.pat_ident_binding_mode(e.span,
|
let next_pat = self.pat_ident_binding_mode(e.span,
|
||||||
next_ident,
|
next_ident,
|
||||||
hir::BindByValue(hir::MutMutable));
|
hir::BindByValue(hir::MutMutable));
|
||||||
|
@ -2237,13 +2237,13 @@ impl<'a> LoweringContext<'a> {
|
||||||
|
|
||||||
let next_expr = P(self.expr_ident(e.span, next_ident, next_pat.id));
|
let next_expr = P(self.expr_ident(e.span, next_ident, next_pat.id));
|
||||||
|
|
||||||
// `let mut _next`
|
// `let mut __next`
|
||||||
let next_let = self.stmt_let_pat(e.span,
|
let next_let = self.stmt_let_pat(e.span,
|
||||||
None,
|
None,
|
||||||
next_pat,
|
next_pat,
|
||||||
hir::LocalSource::ForLoopDesugar);
|
hir::LocalSource::ForLoopDesugar);
|
||||||
|
|
||||||
// `let <pat> = _next`
|
// `let <pat> = __next`
|
||||||
let pat = self.lower_pat(pat);
|
let pat = self.lower_pat(pat);
|
||||||
let pat_let = self.stmt_let_pat(e.span,
|
let pat_let = self.stmt_let_pat(e.span,
|
||||||
Some(next_expr),
|
Some(next_expr),
|
||||||
|
|
|
@ -594,8 +594,12 @@ impl<'hir> Map<'hir> {
|
||||||
/// last good node id we found. Note that reaching the crate root (id == 0),
|
/// last good node id we found. Note that reaching the crate root (id == 0),
|
||||||
/// is not an error, since items in the crate module have the crate root as
|
/// is not an error, since items in the crate module have the crate root as
|
||||||
/// parent.
|
/// parent.
|
||||||
fn walk_parent_nodes<F>(&self, start_id: NodeId, found: F) -> Result<NodeId, NodeId>
|
fn walk_parent_nodes<F, F2>(&self,
|
||||||
where F: Fn(&Node<'hir>) -> bool
|
start_id: NodeId,
|
||||||
|
found: F,
|
||||||
|
bail_early: F2)
|
||||||
|
-> Result<NodeId, NodeId>
|
||||||
|
where F: Fn(&Node<'hir>) -> bool, F2: Fn(&Node<'hir>) -> bool
|
||||||
{
|
{
|
||||||
let mut id = start_id;
|
let mut id = start_id;
|
||||||
loop {
|
loop {
|
||||||
|
@ -616,6 +620,8 @@ impl<'hir> Map<'hir> {
|
||||||
Some(ref node) => {
|
Some(ref node) => {
|
||||||
if found(node) {
|
if found(node) {
|
||||||
return Ok(parent_node);
|
return Ok(parent_node);
|
||||||
|
} else if bail_early(node) {
|
||||||
|
return Err(parent_node);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
|
@ -626,6 +632,56 @@ impl<'hir> Map<'hir> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Retrieve the NodeId for `id`'s enclosing method, unless there's a
|
||||||
|
/// `while` or `loop` before reaching it, as block tail returns are not
|
||||||
|
/// available in them.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// fn foo(x: usize) -> bool {
|
||||||
|
/// if x == 1 {
|
||||||
|
/// true // `get_return_block` gets passed the `id` corresponding
|
||||||
|
/// } else { // to this, it will return `foo`'s `NodeId`.
|
||||||
|
/// false
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// fn foo(x: usize) -> bool {
|
||||||
|
/// loop {
|
||||||
|
/// true // `get_return_block` gets passed the `id` corresponding
|
||||||
|
/// } // to this, it will return `None`.
|
||||||
|
/// false
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
pub fn get_return_block(&self, id: NodeId) -> Option<NodeId> {
|
||||||
|
let match_fn = |node: &Node| {
|
||||||
|
match *node {
|
||||||
|
NodeItem(_) |
|
||||||
|
NodeForeignItem(_) |
|
||||||
|
NodeTraitItem(_) |
|
||||||
|
NodeImplItem(_) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let match_non_returning_block = |node: &Node| {
|
||||||
|
match *node {
|
||||||
|
NodeExpr(ref expr) => {
|
||||||
|
match expr.node {
|
||||||
|
ExprWhile(..) | ExprLoop(..) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
match self.walk_parent_nodes(id, match_fn, match_non_returning_block) {
|
||||||
|
Ok(id) => Some(id),
|
||||||
|
Err(_) => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Retrieve the NodeId for `id`'s parent item, or `id` itself if no
|
/// Retrieve the NodeId for `id`'s parent item, or `id` itself if no
|
||||||
/// parent item is in this map. The "parent item" is the closest parent node
|
/// parent item is in this map. The "parent item" is the closest parent node
|
||||||
/// in the AST which is recorded by the map and is an item, either an item
|
/// in the AST which is recorded by the map and is an item, either an item
|
||||||
|
@ -637,7 +693,7 @@ impl<'hir> Map<'hir> {
|
||||||
NodeTraitItem(_) |
|
NodeTraitItem(_) |
|
||||||
NodeImplItem(_) => true,
|
NodeImplItem(_) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}) {
|
}, |_| false) {
|
||||||
Ok(id) => id,
|
Ok(id) => id,
|
||||||
Err(id) => id,
|
Err(id) => id,
|
||||||
}
|
}
|
||||||
|
@ -649,7 +705,7 @@ impl<'hir> Map<'hir> {
|
||||||
let id = match self.walk_parent_nodes(id, |node| match *node {
|
let id = match self.walk_parent_nodes(id, |node| match *node {
|
||||||
NodeItem(&Item { node: Item_::ItemMod(_), .. }) => true,
|
NodeItem(&Item { node: Item_::ItemMod(_), .. }) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}) {
|
}, |_| false) {
|
||||||
Ok(id) => id,
|
Ok(id) => id,
|
||||||
Err(id) => id,
|
Err(id) => id,
|
||||||
};
|
};
|
||||||
|
@ -668,7 +724,7 @@ impl<'hir> Map<'hir> {
|
||||||
NodeImplItem(_) |
|
NodeImplItem(_) |
|
||||||
NodeBlock(_) => true,
|
NodeBlock(_) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}) {
|
}, |_| false) {
|
||||||
Ok(id) => Some(id),
|
Ok(id) => Some(id),
|
||||||
Err(_) => None,
|
Err(_) => None,
|
||||||
}
|
}
|
||||||
|
|
|
@ -1527,7 +1527,8 @@ impl<'a> State<'a> {
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
word(&mut self.s, "::")?
|
word(&mut self.s, "::")?
|
||||||
}
|
}
|
||||||
if segment.name != keywords::CrateRoot.name() && segment.name != "$crate" {
|
if segment.name != keywords::CrateRoot.name() &&
|
||||||
|
segment.name != keywords::DollarCrate.name() {
|
||||||
self.print_name(segment.name)?;
|
self.print_name(segment.name)?;
|
||||||
self.print_path_parameters(&segment.parameters, colons_before_params)?;
|
self.print_path_parameters(&segment.parameters, colons_before_params)?;
|
||||||
}
|
}
|
||||||
|
@ -1554,7 +1555,8 @@ impl<'a> State<'a> {
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
word(&mut self.s, "::")?
|
word(&mut self.s, "::")?
|
||||||
}
|
}
|
||||||
if segment.name != keywords::CrateRoot.name() && segment.name != "$crate" {
|
if segment.name != keywords::CrateRoot.name() &&
|
||||||
|
segment.name != keywords::DollarCrate.name() {
|
||||||
self.print_name(segment.name)?;
|
self.print_name(segment.name)?;
|
||||||
self.print_path_parameters(&segment.parameters, colons_before_params)?;
|
self.print_path_parameters(&segment.parameters, colons_before_params)?;
|
||||||
}
|
}
|
||||||
|
|
|
@ -524,10 +524,9 @@ for ty::TypeVariants<'tcx>
            region.hash_stable(hcx, hasher);
            pointee_ty.hash_stable(hcx, hasher);
        }
-        TyFnDef(def_id, substs, ref sig) => {
+        TyFnDef(def_id, substs) => {
            def_id.hash_stable(hcx, hasher);
            substs.hash_stable(hcx, hasher);
-            sig.hash_stable(hcx, hasher);
        }
        TyFnPtr(ref sig) => {
            sig.hash_stable(hcx, hasher);
@ -72,9 +72,11 @@ use ty::error::TypeError;
|
||||||
use syntax::ast::DUMMY_NODE_ID;
|
use syntax::ast::DUMMY_NODE_ID;
|
||||||
use syntax_pos::{Pos, Span};
|
use syntax_pos::{Pos, Span};
|
||||||
use errors::{DiagnosticBuilder, DiagnosticStyledString};
|
use errors::{DiagnosticBuilder, DiagnosticStyledString};
|
||||||
|
|
||||||
mod note;
|
mod note;
|
||||||
|
|
||||||
mod need_type_info;
|
mod need_type_info;
|
||||||
|
mod named_anon_conflict;
|
||||||
|
|
||||||
|
|
||||||
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||||
pub fn note_and_explain_region(self,
|
pub fn note_and_explain_region(self,
|
||||||
|
@ -255,34 +257,48 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||||
pub fn report_region_errors(&self,
|
|
||||||
errors: &Vec<RegionResolutionError<'tcx>>) {
|
pub fn report_region_errors(&self, errors: &Vec<RegionResolutionError<'tcx>>) {
|
||||||
debug!("report_region_errors(): {} errors to start", errors.len());
|
debug!("report_region_errors(): {} errors to start", errors.len());
|
||||||
|
|
||||||
// try to pre-process the errors, which will group some of them
|
// try to pre-process the errors, which will group some of them
|
||||||
// together into a `ProcessedErrors` group:
|
// together into a `ProcessedErrors` group:
|
||||||
let errors = self.process_errors(errors);
|
let errors = self.process_errors(errors);
|
||||||
|
|
||||||
debug!("report_region_errors: {} errors after preprocessing", errors.len());
|
debug!("report_region_errors: {} errors after preprocessing",
|
||||||
|
errors.len());
|
||||||
|
|
||||||
for error in errors {
|
for error in errors {
|
||||||
|
|
||||||
debug!("report_region_errors: error = {:?}", error);
|
debug!("report_region_errors: error = {:?}", error);
|
||||||
match error.clone() {
|
|
||||||
ConcreteFailure(origin, sub, sup) => {
|
|
||||||
self.report_concrete_failure(origin, sub, sup).emit();
|
|
||||||
}
|
|
||||||
|
|
||||||
GenericBoundFailure(kind, param_ty, sub) => {
|
if !self.try_report_named_anon_conflict(&error){
|
||||||
self.report_generic_bound_failure(kind, param_ty, sub);
|
|
||||||
}
|
|
||||||
|
|
||||||
SubSupConflict(var_origin,
|
match error.clone() {
|
||||||
|
// These errors could indicate all manner of different
|
||||||
|
// problems with many different solutions. Rather
|
||||||
|
// than generate a "one size fits all" error, what we
|
||||||
|
// attempt to do is go through a number of specific
|
||||||
|
// scenarios and try to find the best way to present
|
||||||
|
// the error. If all of these fails, we fall back to a rather
|
||||||
|
// general bit of code that displays the error information
|
||||||
|
ConcreteFailure(origin, sub, sup) => {
|
||||||
|
|
||||||
|
self.report_concrete_failure(origin, sub, sup).emit();
|
||||||
|
}
|
||||||
|
|
||||||
|
GenericBoundFailure(kind, param_ty, sub) => {
|
||||||
|
self.report_generic_bound_failure(kind, param_ty, sub);
|
||||||
|
}
|
||||||
|
|
||||||
|
SubSupConflict(var_origin,
|
||||||
sub_origin, sub_r,
|
sub_origin, sub_r,
|
||||||
sup_origin, sup_r) => {
|
sup_origin, sup_r) => {
|
||||||
self.report_sub_sup_conflict(var_origin,
|
self.report_sub_sup_conflict(var_origin,
|
||||||
sub_origin, sub_r,
|
sub_origin, sub_r,
|
||||||
sup_origin, sup_r);
|
sup_origin, sup_r);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
199
src/librustc/infer/error_reporting/named_anon_conflict.rs
Normal file
|
@ -0,0 +1,199 @@
|
||||||
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
//! Error Reporting for Anonymous Region Lifetime Errors.
|
||||||
|
use hir;
|
||||||
|
use infer::InferCtxt;
|
||||||
|
use ty::{self, Region};
|
||||||
|
use infer::region_inference::RegionResolutionError::*;
|
||||||
|
use infer::region_inference::RegionResolutionError;
|
||||||
|
use hir::map as hir_map;
|
||||||
|
use hir::def_id::DefId;
|
||||||
|
|
||||||
|
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||||
|
// This method walks the Type of the function body arguments using
|
||||||
|
// `fold_regions()` function and returns the
|
||||||
|
// &hir::Arg of the function argument corresponding to the anonymous
|
||||||
|
// region and the Ty corresponding to the named region.
|
||||||
|
// Currently only the case where the function declaration consists of
|
||||||
|
// one named region and one anonymous region is handled.
|
||||||
|
// Consider the example `fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32`
|
||||||
|
// Here, we would return the hir::Arg for y, we return the type &'a
|
||||||
|
// i32, which is the type of y but with the anonymous region replaced
|
||||||
|
// with 'a, the corresponding bound region and is_first which is true if
|
||||||
|
// the hir::Arg is the first argument in the function declaration.
|
||||||
|
fn find_arg_with_anonymous_region
|
||||||
|
(&self,
|
||||||
|
anon_region: Region<'tcx>,
|
||||||
|
named_region: Region<'tcx>)
|
||||||
|
-> Option<(&hir::Arg, ty::Ty<'tcx>, ty::BoundRegion, bool)> {
|
||||||
|
|
||||||
|
match *anon_region {
|
||||||
|
ty::ReFree(ref free_region) => {
|
||||||
|
|
||||||
|
let id = free_region.scope;
|
||||||
|
let node_id = self.tcx.hir.as_local_node_id(id).unwrap();
|
||||||
|
let body_id = self.tcx.hir.maybe_body_owned_by(node_id).unwrap();
|
||||||
|
let body = self.tcx.hir.body(body_id);
|
||||||
|
if let Some(tables) = self.in_progress_tables {
|
||||||
|
body.arguments
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.filter_map(|(index, arg)| {
|
||||||
|
let ty = tables.borrow().node_id_to_type(arg.id);
|
||||||
|
let mut found_anon_region = false;
|
||||||
|
let new_arg_ty = self.tcx
|
||||||
|
.fold_regions(&ty, &mut false, |r, _| if *r == *anon_region {
|
||||||
|
found_anon_region = true;
|
||||||
|
named_region
|
||||||
|
} else {
|
||||||
|
r
|
||||||
|
});
|
||||||
|
if found_anon_region {
|
||||||
|
let is_first = index == 0;
|
||||||
|
Some((arg, new_arg_ty, free_region.bound_region, is_first))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.next()
|
||||||
|
} else {
|
+None
+}
+}
+_ => None,
+
+}
+}
+
+// This method generates the error message for the case when
+// the function arguments consist of a named region and an anonymous
+// region and corresponds to `ConcreteFailure(..)`
+pub fn try_report_named_anon_conflict(&self, error: &RegionResolutionError<'tcx>) -> bool {
+
+let (span, sub, sup) = match *error {
+ConcreteFailure(ref origin, sub, sup) => (origin.span(), sub, sup),
+_ => return false, // inapplicable
+};
+
+// Determine whether the sub and sup consist of one named region ('a)
+// and one anonymous (elided) region. If so, find the parameter arg
+// where the anonymous region appears (there must always be one; we
+// only introduced anonymous regions in parameters) as well as a
+// version new_ty of its type where the anonymous region is replaced
+// with the named one.
+let (named, (arg, new_ty, br, is_first), scope_def_id) =
+if sub.is_named_region() && self.is_suitable_anonymous_region(sup).is_some() {
+(sub,
+self.find_arg_with_anonymous_region(sup, sub).unwrap(),
+self.is_suitable_anonymous_region(sup).unwrap())
+} else if sup.is_named_region() && self.is_suitable_anonymous_region(sub).is_some() {
+(sup,
+self.find_arg_with_anonymous_region(sub, sup).unwrap(),
+self.is_suitable_anonymous_region(sub).unwrap())
+} else {
+return false; // inapplicable
+};
+
+// Here, we check for the case where the anonymous region
+// is in the return type.
+// FIXME(#42703) - Need to handle certain cases here.
+let ret_ty = self.tcx.type_of(scope_def_id);
+match ret_ty.sty {
+ty::TyFnDef(_, _) => {
+let sig = ret_ty.fn_sig(self.tcx);
+let late_bound_regions = self.tcx
+.collect_referenced_late_bound_regions(&sig.output());
+if late_bound_regions.iter().any(|r| *r == br) {
+return false;
+} else {
+}
+}
+_ => {}
+}
+
+// Here we check for the case where anonymous region
+// corresponds to self and if yes, we display E0312.
+// FIXME(#42700) - Need to format self properly to
+// enable E0621 for it.
+if is_first &&
+self.tcx
+.opt_associated_item(scope_def_id)
+.map(|i| i.method_has_self_argument)
+.unwrap_or(false) {
+return false;
+}
+
+let (error_var, span_label_var) = if let Some(simple_name) = arg.pat.simple_name() {
+(format!("the type of `{}`", simple_name), format!("the type of `{}`", simple_name))
+} else {
+(format!("parameter type"), format!("type"))
+};
+
+
+struct_span_err!(self.tcx.sess,
+span,
+E0621,
+"explicit lifetime required in {}",
+error_var)
+.span_label(arg.pat.span,
+format!("consider changing {} to `{}`", span_label_var, new_ty))
+.span_label(span, format!("lifetime `{}` required", named))
+.emit();
+
+return true;
+
+}
+
+// This method returns whether the given Region is Anonymous
+// and returns the DefId corresponding to the region.
+pub fn is_suitable_anonymous_region(&self, region: Region<'tcx>) -> Option<DefId> {
+
+match *region {
+ty::ReFree(ref free_region) => {
+match free_region.bound_region {
+ty::BrAnon(..) => {
+let anonymous_region_binding_scope = free_region.scope;
+let node_id = self.tcx
+.hir
+.as_local_node_id(anonymous_region_binding_scope)
+.unwrap();
+match self.tcx.hir.find(node_id) {
+Some(hir_map::NodeItem(..)) |
+Some(hir_map::NodeTraitItem(..)) => {
+// proceed ahead //
+}
+Some(hir_map::NodeImplItem(..)) => {
+let container_id = self.tcx
+.associated_item(anonymous_region_binding_scope)
+.container
+.id();
+if self.tcx.impl_trait_ref(container_id).is_some() {
+// For now, we do not try to target impls of traits. This is
+// because this message is going to suggest that the user
+// change the fn signature, but they may not be free to do so,
+// since the signature must match the trait.
+//
+// FIXME(#42706) -- in some cases, we could do better here.
+return None;
+}
+}
+_ => return None, // inapplicable
+// we target only top-level functions
+}
+return Some(anonymous_region_binding_scope);
+}
+_ => None,
+}
+}
+_ => None,
+}
+}
+}
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use hir::{self, map, Local, Pat, Body};
+use hir::{self, Local, Pat, Body};
 use hir::intravisit::{self, Visitor, NestedVisitorMap};
 use infer::InferCtxt;
 use infer::type_variable::TypeVariableOrigin;
@@ -88,7 +88,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 }
 }

-pub fn need_type_info(&self, body_id: hir::BodyId, span: Span, ty: Ty<'tcx>) {
+pub fn need_type_info(&self, body_id: Option<hir::BodyId>, span: Span, ty: Ty<'tcx>) {
 let ty = self.resolve_type_vars_if_possible(&ty);
 let name = self.extract_type_name(&ty);

@@ -103,11 +103,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 found_arg_pattern: None,
 };

-// #40294: cause.body_id can also be a fn declaration.
-// Currently, if it's anything other than NodeExpr, we just ignore it
-match self.tcx.hir.find(body_id.node_id) {
-Some(map::NodeExpr(expr)) => local_visitor.visit_expr(expr),
-_ => ()
+if let Some(body_id) = body_id {
+let expr = self.tcx.hir.expect_expr(body_id.node_id);
+local_visitor.visit_expr(expr);
 }

 if let Some(pattern) = local_visitor.found_arg_pattern {
@@ -38,7 +38,6 @@ use errors::DiagnosticBuilder;
 use syntax_pos::{self, Span, DUMMY_SP};
 use util::nodemap::FxHashMap;
 use arena::DroplessArena;

 use self::combine::CombineFields;
 use self::higher_ranked::HrMatchResult;
 use self::region_inference::{RegionVarBindings, RegionSnapshot};
@@ -1077,6 +1076,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 region_map,
 free_regions);
 let errors = self.region_vars.resolve_regions(&region_rels);

 if !self.is_tainted_by_errors() {
 // As a heuristic, just skip reporting region errors
 // altogether if other errors have been reported while
@@ -1191,28 +1191,6 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 // types using one of these methods, and should not call span_err directly for such
 // errors.

-pub fn type_error_message<M>(&self,
-sp: Span,
-mk_msg: M,
-actual_ty: Ty<'tcx>)
-where M: FnOnce(String) -> String,
-{
-self.type_error_struct(sp, mk_msg, actual_ty).emit();
-}
-
-// FIXME: this results in errors without an error code. Deprecate?
-pub fn type_error_struct<M>(&self,
-sp: Span,
-mk_msg: M,
-actual_ty: Ty<'tcx>)
--> DiagnosticBuilder<'tcx>
-where M: FnOnce(String) -> String,
-{
-self.type_error_struct_with_diag(sp, |actual_ty| {
-self.tcx.sess.struct_span_err(sp, &mk_msg(actual_ty))
-}, actual_ty)
-}
-
 pub fn type_error_struct_with_diag<M>(&self,
 sp: Span,
 mk_diag: M,
@@ -1369,7 +1347,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 Some(self.tcx.closure_kind(def_id))
 }

-pub fn closure_type(&self, def_id: DefId) -> ty::PolyFnSig<'tcx> {
+/// Obtain the signature of a function or closure.
+/// For closures, unlike `tcx.fn_sig(def_id)`, this method will
+/// work during the type-checking of the enclosing function and
+/// return the closure signature in its partially inferred state.
+pub fn fn_sig(&self, def_id: DefId) -> ty::PolyFnSig<'tcx> {
 if let Some(tables) = self.in_progress_tables {
 if let Some(id) = self.tcx.hir.as_local_node_id(def_id) {
 if let Some(&ty) = tables.borrow().closure_tys.get(&id) {
@@ -1378,7 +1360,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 }
 }

-self.tcx.closure_type(def_id)
+self.tcx.fn_sig(def_id)
 }
 }
@@ -39,7 +39,6 @@
 #![feature(specialization)]
 #![feature(unboxed_closures)]
 #![feature(discriminant_value)]
-#![feature(sort_unstable)]
 #![feature(trace_macros)]
 #![feature(test)]
@@ -291,16 +291,13 @@ impl LintStore {
 self.by_name.insert(name.into(), Removed(reason.into()));
 }

-#[allow(unused_variables)]
-fn find_lint(&self, lint_name: &str, sess: &Session, span: Option<Span>)
--> Result<LintId, FindLintError>
-{
+fn find_lint(&self, lint_name: &str) -> Result<LintId, FindLintError> {
 match self.by_name.get(lint_name) {
 Some(&Id(lint_id)) => Ok(lint_id),
 Some(&Renamed(_, lint_id)) => {
 Ok(lint_id)
 },
-Some(&Removed(ref reason)) => {
+Some(&Removed(_)) => {
 Err(FindLintError::Removed)
 },
 None => Err(FindLintError::NotFound)
@@ -313,7 +310,7 @@ impl LintStore {
 &lint_name[..], level);

 let lint_flag_val = Symbol::intern(&lint_name);
-match self.find_lint(&lint_name[..], sess, None) {
+match self.find_lint(&lint_name[..]) {
 Ok(lint_id) => self.levels.set(lint_id, (level, CommandLine(lint_flag_val))),
 Err(FindLintError::Removed) => { }
 Err(_) => {
@@ -513,7 +510,6 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
 }

 let name = lint.name_lower();
-let mut def = None;

 // Except for possible note details, forbid behaves like deny.
 let effective_level = if level == Forbid { Deny } else { level };
@@ -528,7 +524,8 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,

 match source {
 Default => {
-err.note(&format!("#[{}({})] on by default", level.as_str(), name));
+sess.diag_note_once(&mut err, lint,
+&format!("#[{}({})] on by default", level.as_str(), name));
 },
 CommandLine(lint_flag_val) => {
 let flag = match level {
@@ -537,20 +534,24 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
 };
 let hyphen_case_lint_name = name.replace("_", "-");
 if lint_flag_val.as_str() == name {
-err.note(&format!("requested on the command line with `{} {}`",
-flag, hyphen_case_lint_name));
+sess.diag_note_once(&mut err, lint,
+&format!("requested on the command line with `{} {}`",
+flag, hyphen_case_lint_name));
 } else {
 let hyphen_case_flag_val = lint_flag_val.as_str().replace("_", "-");
-err.note(&format!("`{} {}` implied by `{} {}`",
-flag, hyphen_case_lint_name, flag, hyphen_case_flag_val));
+sess.diag_note_once(&mut err, lint,
+&format!("`{} {}` implied by `{} {}`",
+flag, hyphen_case_lint_name, flag,
+hyphen_case_flag_val));
 }
 },
 Node(lint_attr_name, src) => {
-def = Some(src);
+sess.diag_span_note_once(&mut err, lint, src, "lint level defined here");
 if lint_attr_name.as_str() != name {
 let level_str = level.as_str();
-err.note(&format!("#[{}({})] implied by #[{}({})]",
-level_str, name, level_str, lint_attr_name));
+sess.diag_note_once(&mut err, lint,
+&format!("#[{}({})] implied by #[{}({})]",
+level_str, name, level_str, lint_attr_name));
 }
 }
 }
@@ -566,10 +567,6 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
 err.note(&citation);
 }

-if let Some(span) = def {
-sess.diag_span_note_once(&mut err, lint, span, "lint level defined here");
-}
-
 err
 }

@@ -724,21 +721,22 @@ pub trait LintContext<'tcx>: Sized {
 let mut pushed = 0;

 for result in gather_attrs(attrs) {
-let v = match result {
+let (is_group, lint_level_spans) = match result {
 Err(span) => {
 span_err!(self.sess(), span, E0452,
 "malformed lint attribute");
 continue;
 }
 Ok((lint_name, level, span)) => {
-match self.lints().find_lint(&lint_name.as_str(), &self.sess(), Some(span)) {
-Ok(lint_id) => vec![(lint_id, level, span)],
+match self.lints().find_lint(&lint_name.as_str()) {
+Ok(lint_id) => (false, vec![(lint_id, level, span)]),
 Err(FindLintError::NotFound) => {
 match self.lints().lint_groups.get(&*lint_name.as_str()) {
-Some(&(ref v, _)) => v.iter()
+Some(&(ref v, _)) => (true,
+v.iter()
 .map(|lint_id: &LintId|
 (*lint_id, level, span))
-.collect(),
+.collect()),
 None => {
 // The lint or lint group doesn't exist.
 // This is an error, but it was handled
@@ -754,14 +752,18 @@ pub trait LintContext<'tcx>: Sized {

 let lint_attr_name = result.expect("lint attribute should be well-formed").0;

-for (lint_id, level, span) in v {
+for (lint_id, level, span) in lint_level_spans {
 let (now, now_source) = self.lint_sess().get_source(lint_id);
 if now == Forbid && level != Forbid {
-let lint_name = lint_id.to_string();
+let forbidden_lint_name = match now_source {
+LintSource::Default => lint_id.to_string(),
+LintSource::Node(name, _) => name.to_string(),
+LintSource::CommandLine(name) => name.to_string(),
+};
 let mut diag_builder = struct_span_err!(self.sess(), span, E0453,
 "{}({}) overruled by outer forbid({})",
-level.as_str(), lint_name,
-lint_name);
+level.as_str(), lint_attr_name,
+forbidden_lint_name);
 diag_builder.span_label(span, "overruled by previous forbid");
 match now_source {
 LintSource::Default => &mut diag_builder,
@@ -772,7 +774,10 @@ pub trait LintContext<'tcx>: Sized {
 LintSource::CommandLine(_) => {
 diag_builder.note("`forbid` lint level was set on command line")
 }
-}.emit()
+}.emit();
+if is_group { // don't set a separate error for every lint in the group
+break;
+}
 } else if now != level {
 let cx = self.lint_sess_mut();
 cx.stack.push((lint_id, (now, now_source)));
@@ -1420,7 +1425,7 @@ impl Decodable for LintId {
 fn decode<D: Decoder>(d: &mut D) -> Result<LintId, D::Error> {
 let s = d.read_str()?;
 ty::tls::with(|tcx| {
-match tcx.sess.lint_store.borrow().find_lint(&s, tcx.sess, None) {
+match tcx.sess.lint_store.borrow().find_lint(&s) {
 Ok(id) => Ok(id),
 Err(_) => panic!("invalid lint-id `{}`", s),
 }
@@ -12,7 +12,7 @@
 //! `unsafe`.
 use self::RootUnsafeContext::*;

-use ty::{self, Ty, TyCtxt};
+use ty::{self, TyCtxt};
 use lint;

 use syntax::ast;
@@ -40,14 +40,6 @@ enum RootUnsafeContext {
 UnsafeBlock(ast::NodeId),
 }

-fn type_is_unsafe_function(ty: Ty) -> bool {
-match ty.sty {
-ty::TyFnDef(.., f) |
-ty::TyFnPtr(f) => f.unsafety() == hir::Unsafety::Unsafe,
-_ => false,
-}
-}
-
 struct EffectCheckVisitor<'a, 'tcx: 'a> {
 tcx: TyCtxt<'a, 'tcx, 'tcx>,
 tables: &'a ty::TypeckTables<'tcx>,
@@ -174,10 +166,11 @@ impl<'a, 'tcx> Visitor<'tcx> for EffectCheckVisitor<'a, 'tcx> {
 match expr.node {
 hir::ExprMethodCall(..) => {
 let def_id = self.tables.type_dependent_defs[&expr.id].def_id();
-let base_type = self.tcx.type_of(def_id);
-debug!("effect: method call case, base type is {:?}",
-base_type);
-if type_is_unsafe_function(base_type) {
+let sig = self.tcx.fn_sig(def_id);
+debug!("effect: method call case, signature is {:?}",
+sig);
+if sig.0.unsafety == hir::Unsafety::Unsafe {
 self.require_unsafe(expr.span,
 "invocation of unsafe method")
 }
@@ -186,8 +179,13 @@ impl<'a, 'tcx> Visitor<'tcx> for EffectCheckVisitor<'a, 'tcx> {
 let base_type = self.tables.expr_ty_adjusted(base);
 debug!("effect: call case, base type is {:?}",
 base_type);
-if type_is_unsafe_function(base_type) {
-self.require_unsafe(expr.span, "call to unsafe function")
+match base_type.sty {
+ty::TyFnDef(..) | ty::TyFnPtr(_) => {
+if base_type.fn_sig(self.tcx).unsafety() == hir::Unsafety::Unsafe {
+self.require_unsafe(expr.span, "call to unsafe function")
+}
+}
+_ => {}
 }
 }
 hir::ExprUnary(hir::UnDeref, ref base) => {
@@ -816,7 +816,6 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
 fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
 debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);

-let tcx = self.tcx();
 let ExprUseVisitor { ref mc, ref mut delegate, param_env } = *self;
 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |cmt_pat, pat| {
 if let PatKind::Binding(bmode, def_id, ..) = pat.node {
@@ -864,13 +863,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
 match def {
 Def::Variant(variant_did) |
 Def::VariantCtor(variant_did, ..) => {
-let enum_did = tcx.parent_def_id(variant_did).unwrap();
-let downcast_cmt = if tcx.adt_def(enum_did).is_univariant() {
-cmt_pat
-} else {
-let cmt_pat_ty = cmt_pat.ty;
-mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
-};
+let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did);

 debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
 delegate.matched_pat(pat, downcast_cmt, match_mode);
@@ -66,11 +66,8 @@ fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

 impl<'a, 'tcx> ExprVisitor<'a, 'tcx> {
 fn def_id_is_transmute(&self, def_id: DefId) -> bool {
-let intrinsic = match self.tcx.type_of(def_id).sty {
-ty::TyFnDef(.., bfty) => bfty.abi() == RustIntrinsic,
-_ => return false
-};
-intrinsic && self.tcx.item_name(def_id) == "transmute"
+self.tcx.fn_sig(def_id).abi() == RustIntrinsic &&
+self.tcx.item_name(def_id) == "transmute"
 }

 fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) {
@@ -153,22 +150,14 @@ impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> {
 } else {
 Def::Err
 };
-match def {
-Def::Fn(did) if self.def_id_is_transmute(did) => {
+if let Def::Fn(did) = def {
+if self.def_id_is_transmute(did) {
 let typ = self.tables.node_id_to_type(expr.id);
-let typ = self.tcx.lift_to_global(&typ).unwrap();
-match typ.sty {
-ty::TyFnDef(.., sig) if sig.abi() == RustIntrinsic => {
-let from = sig.inputs().skip_binder()[0];
-let to = *sig.output().skip_binder();
-self.check_transmute(expr.span, from, to);
-}
-_ => {
-span_bug!(expr.span, "transmute wasn't a bare fn?!");
-}
-}
+let sig = typ.fn_sig(self.tcx);
+let from = sig.inputs().skip_binder()[0];
+let to = *sig.output().skip_binder();
+self.check_transmute(expr.span, from, to);
 }
-_ => {}
 }

 intravisit::walk_expr(self, expr);
@@ -1032,22 +1032,29 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
 ret
 }

-pub fn cat_downcast<N:ast_node>(&self,
+pub fn cat_downcast_if_needed<N:ast_node>(&self,
 node: &N,
 base_cmt: cmt<'tcx>,
-downcast_ty: Ty<'tcx>,
 variant_did: DefId)
 -> cmt<'tcx> {
-let ret = Rc::new(cmt_ {
-id: node.id(),
-span: node.span(),
-mutbl: base_cmt.mutbl.inherit(),
-cat: Categorization::Downcast(base_cmt, variant_did),
-ty: downcast_ty,
-note: NoteNone
-});
-debug!("cat_downcast ret={:?}", ret);
-ret
+// univariant enums do not need downcasts
+let base_did = self.tcx.parent_def_id(variant_did).unwrap();
+if !self.tcx.adt_def(base_did).is_univariant() {
+let base_ty = base_cmt.ty;
+let ret = Rc::new(cmt_ {
+id: node.id(),
+span: node.span(),
+mutbl: base_cmt.mutbl.inherit(),
+cat: Categorization::Downcast(base_cmt, variant_did),
+ty: base_ty,
+note: NoteNone
+});
+debug!("cat_downcast ret={:?}", ret);
+ret
+} else {
+debug!("cat_downcast univariant={:?}", base_cmt);
+base_cmt
+}
 }

 pub fn cat_pattern<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, mut op: F) -> McResult<()>
@@ -1109,45 +1116,23 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {

 op(cmt.clone(), pat);

-// Note: This goes up here (rather than within the PatKind::TupleStruct arm
-// alone) because PatKind::Struct can also refer to variants.
-let cmt = match pat.node {
-PatKind::Path(hir::QPath::Resolved(_, ref path)) |
-PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) |
-PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => {
-match path.def {
-Def::Err => {
-debug!("access to unresolvable pattern {:?}", pat);
-return Err(())
-}
-Def::Variant(variant_did) |
-Def::VariantCtor(variant_did, ..) => {
-// univariant enums do not need downcasts
-let enum_did = self.tcx.parent_def_id(variant_did).unwrap();
-if !self.tcx.adt_def(enum_did).is_univariant() {
-self.cat_downcast(pat, cmt.clone(), cmt.ty, variant_did)
-} else {
-cmt
-}
-}
-_ => cmt
-}
-}
-_ => cmt
-};
-
 match pat.node {
 PatKind::TupleStruct(ref qpath, ref subpats, ddpos) => {
 let def = self.tables.qpath_def(qpath, pat.id);
-let expected_len = match def {
+let (cmt, expected_len) = match def {
+Def::Err => {
+debug!("access to unresolvable pattern {:?}", pat);
+return Err(())
+}
 Def::VariantCtor(def_id, CtorKind::Fn) => {
 let enum_def = self.tcx.parent_def_id(def_id).unwrap();
-self.tcx.adt_def(enum_def).variant_with_id(def_id).fields.len()
+(self.cat_downcast_if_needed(pat, cmt, def_id),
+self.tcx.adt_def(enum_def).variant_with_id(def_id).fields.len())
 }
 Def::StructCtor(_, CtorKind::Fn) => {
 match self.pat_ty(&pat)?.sty {
 ty::TyAdt(adt_def, _) => {
-adt_def.struct_variant().fields.len()
+(cmt, adt_def.struct_variant().fields.len())
 }
 ref ty => {
 span_bug!(pat.span, "tuple struct pattern unexpected type {:?}", ty);
@@ -1168,8 +1153,21 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
 }
 }

-PatKind::Struct(_, ref field_pats, _) => {
+PatKind::Struct(ref qpath, ref field_pats, _) => {
 // {f1: p1, ..., fN: pN}
+let def = self.tables.qpath_def(qpath, pat.id);
+let cmt = match def {
+Def::Err => {
+debug!("access to unresolvable pattern {:?}", pat);
+return Err(())
+},
+Def::Variant(variant_did) |
+Def::VariantCtor(variant_did, ..) => {
+self.cat_downcast_if_needed(pat, cmt, variant_did)
+},
+_ => cmt
+};
+
 for fp in field_pats {
 let field_ty = self.pat_ty(&fp.node.pat)?; // see (*2)
 let cmt_field = self.cat_field(pat, cmt.clone(), fp.node.name, field_ty);
@@ -28,6 +28,7 @@ use syntax::attr;
 use syntax::ptr::P;
 use syntax_pos::Span;
 use errors::DiagnosticBuilder;
+use util::common::ErrorReported;
 use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap};
 use rustc_back::slice;

@@ -255,7 +256,7 @@ const ROOT_SCOPE: ScopeRef<'static> = &Scope::Root;

 pub fn krate(sess: &Session,
 hir_map: &Map)
--> Result<NamedRegionMap, usize> {
+-> Result<NamedRegionMap, ErrorReported> {
 let krate = hir_map.krate();
 let mut map = NamedRegionMap {
 defs: NodeMap(),
@@ -21,7 +21,7 @@ use session::search_paths::PathKind;
 use session::config::DebugInfoLevel;
 use ty::tls;
 use util::nodemap::{FxHashMap, FxHashSet};
-use util::common::duration_to_secs_str;
+use util::common::{duration_to_secs_str, ErrorReported};

 use syntax::ast::NodeId;
 use errors::{self, DiagnosticBuilder};
@@ -79,10 +79,10 @@ pub struct Session {
 pub working_dir: (String, bool),
 pub lint_store: RefCell<lint::LintStore>,
 pub lints: RefCell<lint::LintTable>,
-/// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics
-/// that have been set once, but should not be set again, in order to avoid
-/// redundantly verbose output (Issue #24690).
-pub one_time_diagnostics: RefCell<FxHashSet<(lint::LintId, Span, String)>>,
+/// Set of (LintId, Option<Span>, message) tuples tracking lint
+/// (sub)diagnostics that have been set once, but should not be set again,
+/// in order to avoid redundantly verbose output (Issue #24690).
+pub one_time_diagnostics: RefCell<FxHashSet<(lint::LintId, Option<Span>, String)>>,
 pub plugin_llvm_passes: RefCell<Vec<String>>,
 pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
 pub crate_types: RefCell<Vec<config::CrateType>>,
@@ -157,6 +157,13 @@ pub struct PerfStats {
 pub decode_def_path_tables_time: Cell<Duration>,
 }

+/// Enum to support dispatch of one-time diagnostics (in Session.diag_once)
+enum DiagnosticBuilderMethod {
+Note,
+SpanNote,
+// add more variants as needed to support one-time diagnostics
+}
+
 impl Session {
 pub fn local_crate_disambiguator(&self) -> Symbol {
 *self.crate_disambiguator.borrow()
@@ -248,7 +255,10 @@ impl Session {
 pub fn abort_if_errors(&self) {
 self.diagnostic().abort_if_errors();
 }
-pub fn track_errors<F, T>(&self, f: F) -> Result<T, usize>
+pub fn compile_status(&self) -> Result<(), CompileIncomplete> {
+compile_result_from_err_count(self.err_count())
+}
+pub fn track_errors<F, T>(&self, f: F) -> Result<T, ErrorReported>
 where F: FnOnce() -> T
 {
 let old_count = self.err_count();
@@ -257,7 +267,7 @@ impl Session {
 if errors == 0 {
 Ok(result)
 } else {
-Err(errors)
+Err(ErrorReported)
 }
 }
 pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
@@ -329,34 +339,53 @@ impl Session {
 &self.parse_sess.span_diagnostic
 }

-/// Analogous to calling `.span_note` on the given DiagnosticBuilder, but
-/// deduplicates on lint ID, span, and message for this `Session` if we're
-/// not outputting in JSON mode.
-//
-// FIXME: if the need arises for one-time diagnostics other than
-// `span_note`, we almost certainly want to generalize this
-// "check/insert-into the one-time diagnostics map, then set message if
-// it's not already there" code to accomodate all of them
-pub fn diag_span_note_once<'a, 'b>(&'a self,
-diag_builder: &'b mut DiagnosticBuilder<'a>,
-lint: &'static lint::Lint, span: Span, message: &str) {
+/// Analogous to calling methods on the given `DiagnosticBuilder`, but
+/// deduplicates on lint ID, span (if any), and message for this `Session`
+/// if we're not outputting in JSON mode.
+fn diag_once<'a, 'b>(&'a self,
+diag_builder: &'b mut DiagnosticBuilder<'a>,
+method: DiagnosticBuilderMethod,
+lint: &'static lint::Lint, message: &str, span: Option<Span>) {
+let mut do_method = || {
+match method {
+DiagnosticBuilderMethod::Note => {
+diag_builder.note(message);
+},
+DiagnosticBuilderMethod::SpanNote => {
+diag_builder.span_note(span.expect("span_note expects a span"), message);
+}
+}
+};

 match self.opts.error_format {
 // when outputting JSON for tool consumption, the tool might want
 // the duplicates
 config::ErrorOutputType::Json => {
-diag_builder.span_note(span, &message);
+do_method()
 },
 _ => {
 let lint_id = lint::LintId::of(lint);
 let id_span_message = (lint_id, span, message.to_owned());
 let fresh = self.one_time_diagnostics.borrow_mut().insert(id_span_message);
 if fresh {
-diag_builder.span_note(span, &message);
+do_method()
 }
 }
 }
 }

+pub fn diag_span_note_once<'a, 'b>(&'a self,
+diag_builder: &'b mut DiagnosticBuilder<'a>,
+lint: &'static lint::Lint, span: Span, message: &str) {
+self.diag_once(diag_builder, DiagnosticBuilderMethod::SpanNote, lint, message, Some(span));
+}
+
+pub fn diag_note_once<'a, 'b>(&'a self,
+diag_builder: &'b mut DiagnosticBuilder<'a>,
+lint: &'static lint::Lint, message: &str) {
+self.diag_once(diag_builder, DiagnosticBuilderMethod::Note, lint, message, None);
+}
+
 pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap {
 self.parse_sess.codemap()
 }
@@ -776,15 +805,23 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
 handler.emit(&MultiSpan::new(), msg, errors::Level::Warning);
 }

-// Err(0) means compilation was stopped, but no errors were found.
-// This would be better as a dedicated enum, but using try! is so convenient.
-pub type CompileResult = Result<(), usize>;
+#[derive(Copy, Clone, Debug)]
+pub enum CompileIncomplete {
+Stopped,
+Errored(ErrorReported)
+}
+impl From<ErrorReported> for CompileIncomplete {
+fn from(err: ErrorReported) -> CompileIncomplete {
+CompileIncomplete::Errored(err)
+}
+}
+pub type CompileResult = Result<(), CompileIncomplete>;

 pub fn compile_result_from_err_count(err_count: usize) -> CompileResult {
 if err_count == 0 {
 Ok(())
 } else {
-Err(err_count)
+Err(CompileIncomplete::Errored(ErrorReported))
 }
 }
@@ -45,7 +45,8 @@ use syntax_pos::{DUMMY_SP, Span};

 impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 pub fn report_fulfillment_errors(&self,
-errors: &Vec<FulfillmentError<'tcx>>) {
+errors: &Vec<FulfillmentError<'tcx>>,
+body_id: Option<hir::BodyId>) {
 #[derive(Debug)]
 struct ErrorDescriptor<'tcx> {
 predicate: ty::Predicate<'tcx>,
@@ -105,7 +106,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {

 for (error, suppressed) in errors.iter().zip(is_suppressed) {
 if !suppressed {
-self.report_fulfillment_error(error);
+self.report_fulfillment_error(error, body_id);
 }
 }
 }
@@ -148,7 +149,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 false
 }

-fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>) {
+fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>,
+body_id: Option<hir::BodyId>) {
 debug!("report_fulfillment_errors({:?})", error);
 match error.code {
 FulfillmentErrorCode::CodeSelectionError(ref e) => {
@@ -158,7 +160,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 self.report_projection_error(&error.obligation, e);
 }
 FulfillmentErrorCode::CodeAmbiguity => {
-self.maybe_report_ambiguity(&error.obligation);
+self.maybe_report_ambiguity(&error.obligation, body_id);
 }
 FulfillmentErrorCode::CodeSubtypeError(ref expected_found, ref err) => {
 self.report_mismatched_types(&error.obligation.cause,
@@ -869,14 +871,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 }

 impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
-fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) {
+fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>,
+body_id: Option<hir::BodyId>) {
 // Unable to successfully determine, probably means
 // insufficient type information, but could mean
 // ambiguous impls. The latter *ought* to be a
 // coherence violation, so we don't report it here.

 let predicate = self.resolve_type_vars_if_possible(&obligation.predicate);
-let body_id = hir::BodyId { node_id: obligation.cause.body_id };
 let span = obligation.cause.span;

 debug!("maybe_report_ambiguity(predicate={:?}, obligation={:?})",
@@ -953,7 +955,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 let &SubtypePredicate { a_is_expected: _, a, b } = data.skip_binder();
 // both must be type variables, or the other would've been instantiated
 assert!(a.is_ty_var() && b.is_ty_var());
-self.need_type_info(hir::BodyId { node_id: obligation.cause.body_id },
+self.need_type_info(body_id,
 obligation.cause.span,
 a);
 }
@@ -1058,7 +1060,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 err.note("slice and array elements must have `Sized` type");
 }
 ObligationCauseCode::TupleElem => {
-err.note("tuple elements must have `Sized` type");
+err.note("only the last element of a tuple may have a dynamically sized type");
 }
 ObligationCauseCode::ProjectionWf(data) => {
 err.note(&format!("required so that the projection `{}` is well-formed",
@@ -1088,13 +1090,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 ObligationCauseCode::VariableType(_) => {
 err.note("all local variables must have a statically known size");
 }
-ObligationCauseCode::ReturnType => {
+ObligationCauseCode::SizedReturnType => {
 err.note("the return type of a function must have a \
 statically known size");
 }
 ObligationCauseCode::AssignmentLhsSized => {
 err.note("the left-hand-side of an assignment must have a statically known size");
 }
+ObligationCauseCode::TupleInitializerSized => {
+err.note("tuples must have a statically known size to be initialized");
+}
 ObligationCauseCode::StructInitializerSized => {
 err.note("structs must have a statically known size to be initialized");
 }
@@ -1133,6 +1138,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 but not on the corresponding trait method",
 predicate));
 }
+ObligationCauseCode::ReturnType(_) |
+ObligationCauseCode::BlockTailExpression(_) => (),
 }
 }
@@ -118,27 +118,34 @@ pub enum ObligationCauseCode<'tcx> {
 /// Obligation incurred due to an object cast.
 ObjectCastObligation(/* Object type */ Ty<'tcx>),

-/// Various cases where expressions must be sized/copy/etc:
-AssignmentLhsSized, // L = X implies that L is Sized
-StructInitializerSized, // S { ... } must be Sized
-VariableType(ast::NodeId), // Type of each variable must be Sized
-ReturnType, // Return type must be Sized
-RepeatVec, // [T,..n] --> T must be Copy
+// Various cases where expressions must be sized/copy/etc:
+/// L = X implies that L is Sized
+AssignmentLhsSized,
+/// (x1, .., xn) must be Sized
+TupleInitializerSized,
+/// S { ... } must be Sized
+StructInitializerSized,
+/// Type of each variable must be Sized
+VariableType(ast::NodeId),
+/// Return type must be Sized
+SizedReturnType,
+/// [T,..n] --> T must be Copy
+RepeatVec,

-// Types of fields (other than the last) in a struct must be sized.
+/// Types of fields (other than the last) in a struct must be sized.
 FieldSized,

-// Constant expressions must be sized.
+/// Constant expressions must be sized.
 ConstSized,

-// static items must have `Sync` type
+/// static items must have `Sync` type
 SharedStatic,

 BuiltinDerivedObligation(DerivedObligationCause<'tcx>),

 ImplDerivedObligation(DerivedObligationCause<'tcx>),

-// error derived when matching traits/impls; see ObligationCause for more details
+/// error derived when matching traits/impls; see ObligationCause for more details
 CompareImplMethodObligation {
 item_name: ast::Name,
 impl_item_def_id: DefId,
@@ -146,37 +153,43 @@ pub enum ObligationCauseCode<'tcx> {
 lint_id: Option<ast::NodeId>,
 },

-// Checking that this expression can be assigned where it needs to be
+/// Checking that this expression can be assigned where it needs to be
 // FIXME(eddyb) #11161 is the original Expr required?
 ExprAssignable,

-// Computing common supertype in the arms of a match expression
+/// Computing common supertype in the arms of a match expression
 MatchExpressionArm { arm_span: Span,
 source: hir::MatchSource },

-// Computing common supertype in an if expression
+/// Computing common supertype in an if expression
 IfExpression,

-// Computing common supertype of an if expression with no else counter-part
+/// Computing common supertype of an if expression with no else counter-part
 IfExpressionWithNoElse,

-// `where a == b`
+/// `where a == b`
 EquatePredicate,

-// `main` has wrong type
+/// `main` has wrong type
 MainFunctionType,

-// `start` has wrong type
+/// `start` has wrong type
 StartFunctionType,

-// intrinsic has wrong type
+/// intrinsic has wrong type
 IntrinsicType,

-// method receiver
+/// method receiver
 MethodReceiver,

-// `return` with no expression
+/// `return` with no expression
 ReturnNoExpression,
+
+/// `return` with an expression
+ReturnType(ast::NodeId),
+
+/// Block implicit return
+BlockTailExpression(ast::NodeId),
 }

 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -498,7 +511,7 @@ pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 ) {
 Ok(predicates) => predicates,
 Err(errors) => {
-infcx.report_fulfillment_errors(&errors);
+infcx.report_fulfillment_errors(&errors, None);
 // An unnormalized env is better than nothing.
 return elaborated_env;
 }
@@ -597,7 +610,7 @@ pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 debug!("normalize_and_test_predicates(predicates={:?})",
 predicates);

-tcx.infer_ctxt().enter(|infcx| {
+let result = tcx.infer_ctxt().enter(|infcx| {
 let param_env = ty::ParamEnv::empty(Reveal::All);
 let mut selcx = SelectionContext::new(&infcx);
 let mut fulfill_cx = FulfillmentContext::new();
@@ -613,7 +626,10 @@ pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 }

 fulfill_cx.select_all_or_error(&infcx).is_ok()
-})
+});
+debug!("normalize_and_test_predicates(predicates={:?}) = {:?}",
+predicates, result);
+result
 }

 /// Given a trait `trait_ref`, iterates the vtable entries
@@ -260,7 +260,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {

 // The `Self` type is erased, so it should not appear in list of
 // arguments or return type apart from the receiver.
-let ref sig = self.type_of(method.def_id).fn_sig();
+let ref sig = self.fn_sig(method.def_id);
 for input_ty in &sig.skip_binder().inputs()[1..] {
 if self.contains_illegal_self_type_reference(trait_def_id, input_ty) {
 return Some(MethodViolationCode::ReferencesSelf);
@@ -1137,9 +1137,19 @@ fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>(
 -> Progress<'tcx>
 {
 let fn_type = selcx.infcx().shallow_resolve(fn_pointer_vtable.fn_ty);
-let sig = fn_type.fn_sig();
+let sig = fn_type.fn_sig(selcx.tcx());
+let Normalized {
+value: sig,
+obligations
+} = normalize_with_depth(selcx,
+obligation.param_env,
+obligation.cause.clone(),
+obligation.recursion_depth+1,
+&sig);
+
 confirm_callable_candidate(selcx, obligation, sig, util::TupleArgumentsFlag::Yes)
 .with_addl_obligations(fn_pointer_vtable.nested)
+.with_addl_obligations(obligations)
 }

 fn confirm_closure_candidate<'cx, 'gcx, 'tcx>(
@@ -1149,7 +1159,7 @@ fn confirm_closure_candidate<'cx, 'gcx, 'tcx>(
 -> Progress<'tcx>
 {
 let closure_typer = selcx.closure_typer();
-let closure_type = closure_typer.closure_type(vtable.closure_def_id)
+let closure_type = closure_typer.fn_sig(vtable.closure_def_id)
 .subst(selcx.tcx(), vtable.substs.substs);
 let Normalized {
 value: closure_type,
@ -1404,19 +1404,15 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// provide an impl, but only for suitable `fn` pointers
|
// provide an impl, but only for suitable `fn` pointers
|
||||||
ty::TyFnDef(.., ty::Binder(ty::FnSig {
|
ty::TyFnDef(..) | ty::TyFnPtr(_) => {
|
||||||
unsafety: hir::Unsafety::Normal,
|
if let ty::Binder(ty::FnSig {
|
||||||
abi: Abi::Rust,
|
unsafety: hir::Unsafety::Normal,
|
||||||
variadic: false,
|
abi: Abi::Rust,
|
||||||
..
|
variadic: false,
|
||||||
})) |
|
..
|
||||||
ty::TyFnPtr(ty::Binder(ty::FnSig {
|
}) = self_ty.fn_sig(self.tcx()) {
|
-                unsafety: hir::Unsafety::Normal,
-                abi: Abi::Rust,
-                variadic: false,
-                ..
-            })) => {
-                candidates.vec.push(FnPointerCandidate);
+                candidates.vec.push(FnPointerCandidate);
+            }
             }
 
             _ => { }
@@ -1655,6 +1651,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 def_id_a == def_id_b
             }
 
+            // (.., T) -> (.., U).
+            (&ty::TyTuple(tys_a, _), &ty::TyTuple(tys_b, _)) => {
+                tys_a.len() == tys_b.len()
+            }
+
             _ => false
         };
 
@@ -2348,7 +2349,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
 
         // ok to skip binder; it is reintroduced below
         let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
-        let sig = self_ty.fn_sig();
+        let sig = self_ty.fn_sig(self.tcx());
         let trait_ref =
             self.tcx().closure_trait_ref_and_return_type(obligation.predicate.def_id(),
                                                          self_ty,
@@ -2356,11 +2357,18 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                                                          util::TupleArgumentsFlag::Yes)
             .map_bound(|(trait_ref, _)| trait_ref);
 
+        let Normalized { value: trait_ref, obligations } =
+            project::normalize_with_depth(self,
+                                          obligation.param_env,
+                                          obligation.cause.clone(),
+                                          obligation.recursion_depth + 1,
+                                          &trait_ref);
+
         self.confirm_poly_trait_refs(obligation.cause.clone(),
                                      obligation.param_env,
                                      obligation.predicate.to_poly_trait_ref(),
                                      trait_ref)?;
-        Ok(VtableFnPointerData { fn_ty: self_ty, nested: vec![] })
+        Ok(VtableFnPointerData { fn_ty: self_ty, nested: obligations })
     }
 
     fn confirm_closure_candidate(&mut self,
@@ -2588,8 +2596,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 let inner_source = field.subst(tcx, substs_a);
                 let inner_target = field.subst(tcx, substs_b);
 
-                // Check that the source structure with the target's
-                // type parameters is a subtype of the target.
+                // Check that the source struct with the target's
+                // unsized parameters is equal to the target.
                 let params = substs_a.iter().enumerate().map(|(i, &k)| {
                     if ty_params.contains(i) {
                         Kind::from(substs_b.type_at(i))
@@ -2614,6 +2622,37 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                                                      &[inner_target]));
             }
 
+            // (.., T) -> (.., U).
+            (&ty::TyTuple(tys_a, _), &ty::TyTuple(tys_b, _)) => {
+                assert_eq!(tys_a.len(), tys_b.len());
+
+                // The last field of the tuple has to exist.
+                let (a_last, a_mid) = if let Some(x) = tys_a.split_last() {
+                    x
+                } else {
+                    return Err(Unimplemented);
+                };
+                let b_last = tys_b.last().unwrap();
+
+                // Check that the source tuple with the target's
+                // last element is equal to the target.
+                let new_tuple = tcx.mk_tup(a_mid.iter().chain(Some(b_last)), false);
+                let InferOk { obligations, .. } =
+                    self.infcx.at(&obligation.cause, obligation.param_env)
+                              .eq(target, new_tuple)
+                              .map_err(|_| Unimplemented)?;
+                self.inferred_obligations.extend(obligations);
+
+                // Construct the nested T: Unsize<U> predicate.
+                nested.push(tcx.predicate_for_trait_def(
+                    obligation.param_env,
+                    obligation.cause.clone(),
+                    obligation.predicate.def_id(),
+                    obligation.recursion_depth + 1,
+                    a_last,
+                    &[b_last]));
+            }
+
             _ => bug!()
         };
 
@@ -2799,7 +2838,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                                     substs: ty::ClosureSubsts<'tcx>)
                                     -> ty::PolyTraitRef<'tcx>
     {
-        let closure_type = self.infcx.closure_type(closure_def_id)
+        let closure_type = self.infcx.fn_sig(closure_def_id)
             .subst(self.tcx(), substs.substs);
         let ty::Binder((trait_ref, _)) =
             self.tcx().closure_trait_ref_and_return_type(obligation.predicate.def_id(),
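For orientation: the selection and unsizing hunks above add the `(.., T) -> (.., U)` case, i.e. coercing a tuple whose last element unsizes. A minimal user-level sketch of the coercion this enables follows; it is an illustration only, and the `unsized_tuple_coercion` feature-gate name is an assumption since the gate itself is not shown in this diff.

    // Illustration only; not part of this patch. Assumes a nightly compiler and
    // the `unsized_tuple_coercion` feature gate (name assumed, not shown here).
    #![feature(unsized_tuple_coercion)]

    fn main() {
        // (.., T) -> (.., U): unsize the last tuple element, here [i32; 3] -> [i32].
        let sized: &(u8, [i32; 3]) = &(7, [1, 2, 3]);
        let tail_unsized: &(u8, [i32]) = sized;
        assert_eq!(tail_unsized.1.len(), 3);
    }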
@@ -189,9 +189,11 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
                 tcx.lift(&ty).map(super::ObjectCastObligation)
             }
             super::AssignmentLhsSized => Some(super::AssignmentLhsSized),
+            super::TupleInitializerSized => Some(super::TupleInitializerSized),
             super::StructInitializerSized => Some(super::StructInitializerSized),
             super::VariableType(id) => Some(super::VariableType(id)),
-            super::ReturnType => Some(super::ReturnType),
+            super::ReturnType(id) => Some(super::ReturnType(id)),
+            super::SizedReturnType => Some(super::SizedReturnType),
             super::RepeatVec => Some(super::RepeatVec),
             super::FieldSized => Some(super::FieldSized),
             super::ConstSized => Some(super::ConstSized),
@@ -213,34 +215,19 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
                     lint_id: lint_id,
                 })
             }
-            super::ExprAssignable => {
-                Some(super::ExprAssignable)
-            }
+            super::ExprAssignable => Some(super::ExprAssignable),
             super::MatchExpressionArm { arm_span, source } => {
                 Some(super::MatchExpressionArm { arm_span: arm_span,
                                                  source: source })
             }
-            super::IfExpression => {
-                Some(super::IfExpression)
-            }
-            super::IfExpressionWithNoElse => {
-                Some(super::IfExpressionWithNoElse)
-            }
-            super::EquatePredicate => {
-                Some(super::EquatePredicate)
-            }
-            super::MainFunctionType => {
-                Some(super::MainFunctionType)
-            }
-            super::StartFunctionType => {
-                Some(super::StartFunctionType)
-            }
-            super::IntrinsicType => {
-                Some(super::IntrinsicType)
-            }
-            super::MethodReceiver => {
-                Some(super::MethodReceiver)
-            }
+            super::IfExpression => Some(super::IfExpression),
+            super::IfExpressionWithNoElse => Some(super::IfExpressionWithNoElse),
+            super::EquatePredicate => Some(super::EquatePredicate),
+            super::MainFunctionType => Some(super::MainFunctionType),
+            super::StartFunctionType => Some(super::StartFunctionType),
+            super::IntrinsicType => Some(super::IntrinsicType),
+            super::MethodReceiver => Some(super::MethodReceiver),
+            super::BlockTailExpression(id) => Some(super::BlockTailExpression(id)),
         }
     }
 }
@@ -490,14 +477,17 @@ impl<'tcx> TypeFoldable<'tcx> for traits::ObligationCauseCode<'tcx> {
             super::TupleElem |
             super::ItemObligation(_) |
             super::AssignmentLhsSized |
+            super::TupleInitializerSized |
             super::StructInitializerSized |
             super::VariableType(_) |
-            super::ReturnType |
+            super::ReturnType(_) |
+            super::SizedReturnType |
             super::ReturnNoExpression |
             super::RepeatVec |
             super::FieldSized |
             super::ConstSized |
             super::SharedStatic |
+            super::BlockTailExpression(_) |
             super::CompareImplMethodObligation { .. } => self.clone(),
 
             super::ProjectionWf(proj) => super::ProjectionWf(proj.fold_with(folder)),
@@ -535,14 +525,17 @@ impl<'tcx> TypeFoldable<'tcx> for traits::ObligationCauseCode<'tcx> {
             super::TupleElem |
             super::ItemObligation(_) |
             super::AssignmentLhsSized |
+            super::TupleInitializerSized |
             super::StructInitializerSized |
             super::VariableType(_) |
-            super::ReturnType |
+            super::ReturnType(_) |
+            super::SizedReturnType |
             super::ReturnNoExpression |
             super::RepeatVec |
             super::FieldSized |
             super::ConstSized |
             super::SharedStatic |
+            super::BlockTailExpression(_) |
             super::CompareImplMethodObligation { .. } => false,
 
             super::ProjectionWf(proj) => proj.visit_with(visitor),
@@ -1378,9 +1378,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     }
 
     pub fn mk_fn_def(self, def_id: DefId,
-                     substs: &'tcx Substs<'tcx>,
-                     fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
-        self.mk_ty(TyFnDef(def_id, substs, fty))
+                     substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
+        self.mk_ty(TyFnDef(def_id, substs))
     }
 
     pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
@@ -68,6 +68,7 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
            // view of possibly unifying
            simplify_type(tcx, mt.ty, can_simplify_params)
        }
+        ty::TyFnDef(def_id, _) |
        ty::TyClosure(def_id, _) => {
            Some(ClosureSimplifiedType(def_id))
        }
@@ -75,7 +76,7 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
        ty::TyTuple(ref tys, _) => {
            Some(TupleSimplifiedType(tys.len()))
        }
-        ty::TyFnDef(.., ref f) | ty::TyFnPtr(ref f) => {
+        ty::TyFnPtr(ref f) => {
            Some(FunctionSimplifiedType(f.skip_binder().inputs().len()))
        }
        ty::TyProjection(_) | ty::TyParam(_) => {
@@ -155,9 +155,8 @@ impl FlagComputation {
            self.add_tys(&ts[..]);
        }
 
-        &ty::TyFnDef(_, substs, f) => {
+        &ty::TyFnDef(_, substs) => {
            self.add_substs(substs);
-            self.add_fn_sig(f);
        }
 
        &ty::TyFnPtr(f) => {
@@ -348,7 +348,7 @@ pub fn characteristic_def_id_of_type(ty: Ty) -> Option<DefId> {
            .filter_map(|ty| characteristic_def_id_of_type(ty))
            .next(),
 
-        ty::TyFnDef(def_id, ..) |
+        ty::TyFnDef(def_id, _) |
        ty::TyClosure(def_id, _) => Some(def_id),
 
        ty::TyBool |
@@ -1220,12 +1220,16 @@ impl<'a, 'tcx> Layout {
            }
 
            ty::TyTuple(tys, _) => {
-                // FIXME(camlorn): if we ever allow unsized tuples, this needs to be checked.
-                // See the univariant case below to learn how.
+                let kind = if tys.len() == 0 {
+                    StructKind::AlwaysSizedUnivariant
+                } else {
+                    StructKind::MaybeUnsizedUnivariant
+                };
+
                let st = Struct::new(dl,
                    &tys.iter().map(|ty| ty.layout(tcx, param_env))
                      .collect::<Result<Vec<_>, _>>()?,
-                    &ReprOptions::default(), StructKind::AlwaysSizedUnivariant, ty)?;
+                    &ReprOptions::default(), kind, ty)?;
                Univariant { variant: st, non_zero: false }
            }
 
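The layout hunk above switches non-empty tuples from "always sized" to "maybe unsized", matching the tuple-unsizing support added earlier in this diff. A self-contained toy model of just that decision (not rustc code) is below.

    // Toy model of the kind computation above; the real code feeds the kind
    // into Struct::new together with the field layouts.
    #[derive(Debug, PartialEq)]
    enum StructKind { AlwaysSizedUnivariant, MaybeUnsizedUnivariant }

    fn tuple_kind(num_fields: usize) -> StructKind {
        // An empty tuple is always sized; a non-empty tuple may now end in an
        // unsized field, so it is laid out like a struct with a possibly unsized tail.
        if num_fields == 0 {
            StructKind::AlwaysSizedUnivariant
        } else {
            StructKind::MaybeUnsizedUnivariant
        }
    }

    fn main() {
        assert_eq!(tuple_kind(0), StructKind::AlwaysSizedUnivariant);
        assert_eq!(tuple_kind(2), StructKind::MaybeUnsizedUnivariant);
    }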
@@ -875,13 +875,12 @@ define_maps! { <'tcx>
    /// for trans. This is also the only query that can fetch non-local MIR, at present.
    [] optimized_mir: Mir(DefId) -> &'tcx mir::Mir<'tcx>,
 
-    /// Records the type of each closure. The def ID is the ID of the
+    /// Type of each closure. The def ID is the ID of the
    /// expression defining the closure.
    [] closure_kind: ItemSignature(DefId) -> ty::ClosureKind,
 
-    /// Records the type of each closure. The def ID is the ID of the
-    /// expression defining the closure.
-    [] closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
+    /// The signature of functions and closures.
+    [] fn_sig: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
 
    /// Caches CoerceUnsized kinds for impls on custom types.
    [] coerce_unsized_info: ItemSignature(DefId)
@@ -206,7 +206,7 @@ impl AssociatedItem {
                // late-bound regions, and we don't want method signatures to show up
                // `as for<'r> fn(&'r MyType)`. Pretty-printing handles late-bound
                // regions just fine, showing `fn(&MyType)`.
-                format!("{}", tcx.type_of(self.def_id).fn_sig().skip_binder())
+                format!("{}", tcx.fn_sig(self.def_id).skip_binder())
            }
            ty::AssociatedKind::Type => format!("type {};", self.name.to_string()),
            ty::AssociatedKind::Const => {
@@ -481,6 +481,18 @@ impl<'tcx> TyS<'tcx> {
            _ => false,
        }
    }
+
+    pub fn is_suggestable(&self) -> bool {
+        match self.sty {
+            TypeVariants::TyAnon(..) |
+            TypeVariants::TyFnDef(..) |
+            TypeVariants::TyFnPtr(..) |
+            TypeVariants::TyDynamic(..) |
+            TypeVariants::TyClosure(..) |
+            TypeVariants::TyProjection(..) => false,
+            _ => true,
+        }
+    }
 }
 
 impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::TyS<'tcx> {
@@ -291,7 +291,7 @@ impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> {
        if a.def_id != b.def_id {
            Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
        } else {
-            let substs = relation.relate_item_substs(a.def_id, a.substs, b.substs)?;
+            let substs = relate_substs(relation, None, a.substs, b.substs)?;
            Ok(ty::TraitRef { def_id: a.def_id, substs: substs })
        }
    }
@@ -308,7 +308,7 @@ impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> {
        if a.def_id != b.def_id {
            Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
        } else {
-            let substs = relation.relate_item_substs(a.def_id, a.substs, b.substs)?;
+            let substs = relate_substs(relation, None, a.substs, b.substs)?;
            Ok(ty::ExistentialTraitRef { def_id: a.def_id, substs: substs })
        }
    }
@@ -440,13 +440,11 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R,
            }
        }
 
-        (&ty::TyFnDef(a_def_id, a_substs, a_fty),
-         &ty::TyFnDef(b_def_id, b_substs, b_fty))
+        (&ty::TyFnDef(a_def_id, a_substs), &ty::TyFnDef(b_def_id, b_substs))
            if a_def_id == b_def_id =>
        {
-            let substs = relate_substs(relation, None, a_substs, b_substs)?;
-            let fty = relation.relate(&a_fty, &b_fty)?;
-            Ok(tcx.mk_fn_def(a_def_id, substs, fty))
+            let substs = relation.relate_item_substs(a_def_id, a_substs, b_substs)?;
+            Ok(tcx.mk_fn_def(a_def_id, substs))
        }
 
        (&ty::TyFnPtr(a_fty), &ty::TyFnPtr(b_fty)) =>
@@ -531,10 +531,8 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> {
            ty::TyDynamic(ref trait_ty, ref region) =>
                ty::TyDynamic(trait_ty.fold_with(folder), region.fold_with(folder)),
            ty::TyTuple(ts, defaulted) => ty::TyTuple(ts.fold_with(folder), defaulted),
-            ty::TyFnDef(def_id, substs, f) => {
-                ty::TyFnDef(def_id,
-                            substs.fold_with(folder),
-                            f.fold_with(folder))
+            ty::TyFnDef(def_id, substs) => {
+                ty::TyFnDef(def_id, substs.fold_with(folder))
            }
            ty::TyFnPtr(f) => ty::TyFnPtr(f.fold_with(folder)),
            ty::TyRef(ref r, tm) => {
@@ -568,9 +566,7 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> {
            ty::TyDynamic(ref trait_ty, ref reg) =>
                trait_ty.visit_with(visitor) || reg.visit_with(visitor),
            ty::TyTuple(ts, _) => ts.visit_with(visitor),
-            ty::TyFnDef(_, substs, ref f) => {
-                substs.visit_with(visitor) || f.visit_with(visitor)
-            }
+            ty::TyFnDef(_, substs) => substs.visit_with(visitor),
            ty::TyFnPtr(ref f) => f.visit_with(visitor),
            ty::TyRef(r, ref tm) => r.visit_with(visitor) || tm.visit_with(visitor),
            ty::TyClosure(_did, ref substs) => substs.visit_with(visitor),
@@ -14,7 +14,7 @@ use hir::def_id::DefId;
 use hir::map::DefPathHash;
 
 use middle::region;
-use ty::subst::Substs;
+use ty::subst::{Substs, Subst};
 use ty::{self, AdtDef, TypeFlags, Ty, TyCtxt, TypeFoldable};
 use ty::{Slice, TyS};
 use ty::subst::Kind;
@@ -138,7 +138,7 @@ pub enum TypeVariants<'tcx> {
 
    /// The anonymous type of a function declaration/definition. Each
    /// function has a unique type.
-    TyFnDef(DefId, &'tcx Substs<'tcx>, PolyFnSig<'tcx>),
+    TyFnDef(DefId, &'tcx Substs<'tcx>),
 
    /// A pointer to a function. Written as `fn() -> i32`.
    TyFnPtr(PolyFnSig<'tcx>),
@@ -990,6 +990,20 @@ impl RegionKind {
 
        flags
    }
+
+    // This method returns whether the given Region is Named
+    pub fn is_named_region(&self) -> bool {
+        match *self {
+            ty::ReFree(ref free_region) => {
+                match free_region.bound_region {
+                    ty::BrNamed(..) => true,
+                    _ => false,
+                }
+            }
+            _ => false,
+        }
+    }
 }
 
 /// Type utilities
@@ -1329,9 +1343,12 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> {
        }
    }
 
-    pub fn fn_sig(&self) -> PolyFnSig<'tcx> {
+    pub fn fn_sig(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PolyFnSig<'tcx> {
        match self.sty {
-            TyFnDef(.., f) | TyFnPtr(f) => f,
+            TyFnDef(def_id, substs) => {
+                tcx.fn_sig(def_id).subst(tcx, substs)
+            }
+            TyFnPtr(f) => f,
            _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self)
        }
    }
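The hunks above are the core representational change in this merge: `TyFnDef` no longer embeds its `PolyFnSig`, the signature is served by the `fn_sig` query and substituted on demand, and `Ty::fn_sig` now requires a `tcx`. A self-contained toy model of that before/after shape (not rustc's actual types or API) is below.

    // Toy model only: mirrors the structural change, not the real rustc API.
    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    struct FnSig { inputs: Vec<&'static str>, output: &'static str }

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct DefId(u32);

    enum Ty {
        FnDef(DefId),   // before this patch: FnDef(DefId, FnSig)
        FnPtr(FnSig),
    }

    struct Tcx { fn_sigs: HashMap<DefId, FnSig> }

    impl Tcx {
        // Analogue of Ty::fn_sig(tcx): FnDef types look the signature up in a
        // side table keyed by DefId; FnPtr types still carry it inline.
        fn fn_sig(&self, ty: &Ty) -> FnSig {
            match ty {
                Ty::FnDef(def_id) => self.fn_sigs[def_id].clone(),
                Ty::FnPtr(sig) => sig.clone(),
            }
        }
    }

    fn main() {
        let mut fn_sigs = HashMap::new();
        fn_sigs.insert(DefId(0), FnSig { inputs: vec!["i32"], output: "bool" });
        let tcx = Tcx { fn_sigs };
        println!("{:?}", tcx.fn_sig(&Ty::FnDef(DefId(0))));
    }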
@@ -317,15 +317,26 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                                    target: Ty<'tcx>)
                                    -> (Ty<'tcx>, Ty<'tcx>) {
        let (mut a, mut b) = (source, target);
-        while let (&TyAdt(a_def, a_substs), &TyAdt(b_def, b_substs)) = (&a.sty, &b.sty) {
-            if a_def != b_def || !a_def.is_struct() {
-                break;
-            }
-            match a_def.struct_variant().fields.last() {
-                Some(f) => {
-                    a = f.ty(self, a_substs);
-                    b = f.ty(self, b_substs);
-                }
+        loop {
+            match (&a.sty, &b.sty) {
+                (&TyAdt(a_def, a_substs), &TyAdt(b_def, b_substs))
+                        if a_def == b_def && a_def.is_struct() => {
+                    if let Some(f) = a_def.struct_variant().fields.last() {
+                        a = f.ty(self, a_substs);
+                        b = f.ty(self, b_substs);
+                    } else {
+                        break;
+                    }
+                },
+                (&TyTuple(a_tys, _), &TyTuple(b_tys, _))
+                        if a_tys.len() == b_tys.len() => {
+                    if let Some(a_last) = a_tys.last() {
+                        a = a_last;
+                        b = b_tys.last().unwrap();
+                    } else {
+                        break;
+                    }
+                },
                _ => break,
            }
        }
@@ -679,7 +690,7 @@ impl<'a, 'gcx, 'tcx, W> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx, W>
            TyRef(_, m) => self.hash(m.mutbl),
            TyClosure(def_id, _) |
            TyAnon(def_id, _) |
-            TyFnDef(def_id, ..) => self.def_id(def_id),
+            TyFnDef(def_id, _) => self.def_id(def_id),
            TyAdt(d, _) => self.def_id(d.did),
            TyFnPtr(f) => {
                self.hash(f.unsafety());
@@ -115,9 +115,8 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) {
        ty::TyTuple(ts, _) => {
            stack.extend(ts.iter().cloned().rev());
        }
-        ty::TyFnDef(_, substs, ft) => {
+        ty::TyFnDef(_, substs) => {
            stack.extend(substs.types().rev());
-            push_sig_subtypes(stack, ft);
        }
        ty::TyFnPtr(ft) => {
            push_sig_subtypes(stack, ft);
@@ -753,8 +753,14 @@ impl<'tcx> fmt::Display for ty::TypeVariants<'tcx> {
                }
                write!(f, ")")
            }
-            TyFnDef(def_id, substs, ref bare_fn) => {
-                write!(f, "{} {{", bare_fn.0)?;
+            TyFnDef(def_id, substs) => {
+                ty::tls::with(|tcx| {
+                    let mut sig = tcx.fn_sig(def_id);
+                    if let Some(substs) = tcx.lift(&substs) {
+                        sig = sig.subst(tcx, substs);
+                    }
+                    write!(f, "{} {{", sig.0)
+                })?;
                parameterized(f, substs, def_id, &[])?;
                write!(f, "}}")
            }
@@ -32,4 +32,5 @@ fn main() {
            .build_target("asan")
            .build();
    }
+    println!("cargo:rerun-if-env-changed=LLVM_CONFIG");
 }
@@ -18,8 +18,6 @@ pub use self::bckerr_code::*;
 pub use self::AliasableViolationKind::*;
 pub use self::MovedValueUseKind::*;
 
-pub use self::mir::elaborate_drops::ElaborateDrops;
-
 use self::InteriorKind::*;
 
 use rustc::hir::map as hir_map;
@@ -55,8 +53,6 @@ pub mod gather_loans;
 
 pub mod move_data;
 
-mod mir;
-
 #[derive(Clone, Copy)]
 pub struct LoanDataFlowOperator;
 
@@ -100,26 +96,21 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) {
    }
 
    let body_id = tcx.hir.body_owned_by(owner_id);
-    let attributes = tcx.get_attrs(owner_def_id);
    let tables = tcx.typeck_tables_of(owner_def_id);
    let region_maps = tcx.region_maps(owner_def_id);
    let mut bccx = &mut BorrowckCtxt { tcx, tables, region_maps, owner_def_id };
 
    let body = bccx.tcx.hir.body(body_id);
 
-    if bccx.tcx.has_attr(owner_def_id, "rustc_mir_borrowck") {
-        mir::borrowck_mir(bccx, owner_id, &attributes);
-    } else {
-        // Eventually, borrowck will always read the MIR, but at the
-        // moment we do not. So, for now, we always force MIR to be
-        // constructed for a given fn, since this may result in errors
-        // being reported and we want that to happen.
-        //
-        // Note that `mir_validated` is a "stealable" result; the
-        // thief, `optimized_mir()`, forces borrowck, so we know that
-        // is not yet stolen.
-        tcx.mir_validated(owner_def_id).borrow();
-    }
+    // Eventually, borrowck will always read the MIR, but at the
+    // moment we do not. So, for now, we always force MIR to be
+    // constructed for a given fn, since this may result in errors
+    // being reported and we want that to happen.
+    //
+    // Note that `mir_validated` is a "stealable" result; the
+    // thief, `optimized_mir()`, forces borrowck, so we know that
+    // is not yet stolen.
+    tcx.mir_validated(owner_def_id).borrow();
 
    let cfg = cfg::CFG::new(bccx.tcx, &body);
    let AnalysisData { all_loans,
|
@ -21,7 +21,6 @@
|
||||||
#![feature(quote)]
|
#![feature(quote)]
|
||||||
#![feature(rustc_diagnostic_macros)]
|
#![feature(rustc_diagnostic_macros)]
|
||||||
#![feature(associated_consts)]
|
#![feature(associated_consts)]
|
||||||
#![feature(nonzero)]
|
|
||||||
|
|
||||||
#[macro_use] extern crate log;
|
#[macro_use] extern crate log;
|
||||||
#[macro_use] extern crate syntax;
|
#[macro_use] extern crate syntax;
|
||||||
|
@ -39,7 +38,7 @@ extern crate core; // for NonZero
|
||||||
|
|
||||||
pub use borrowck::check_crate;
|
pub use borrowck::check_crate;
|
||||||
pub use borrowck::build_borrowck_dataflow_data_for_fn;
|
pub use borrowck::build_borrowck_dataflow_data_for_fn;
|
||||||
pub use borrowck::{AnalysisData, BorrowckCtxt, ElaborateDrops};
|
pub use borrowck::{AnalysisData, BorrowckCtxt};
|
||||||
|
|
||||||
// NB: This module needs to be declared first so diagnostics are
|
// NB: This module needs to be declared first so diagnostics are
|
||||||
// registered before they are used.
|
// registered before they are used.
|
||||||
|
|
|
@@ -12,7 +12,7 @@ use eval;
 
 use rustc::middle::const_val::{ConstEvalErr, ConstVal};
 use rustc::mir::{Field, BorrowKind, Mutability};
-use rustc::ty::{self, TyCtxt, AdtDef, Ty, TypeVariants, Region};
+use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region};
 use rustc::ty::subst::{Substs, Kind};
 use rustc::hir::{self, PatKind, RangeEnd};
 use rustc::hir::def::{Def, CtorKind};
@@ -549,8 +549,8 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
            let adt_def = self.tcx.adt_def(enum_id);
            if adt_def.variants.len() > 1 {
                let substs = match ty.sty {
-                    TypeVariants::TyAdt(_, substs) => substs,
-                    TypeVariants::TyFnDef(_, substs, _) => substs,
+                    ty::TyAdt(_, substs) |
+                    ty::TyFnDef(_, substs) => substs,
                    _ => bug!("inappropriate type for def: {:?}", ty.sty),
                };
                PatternKind::Variant {
@@ -13,7 +13,8 @@ use rustc::hir::lowering::lower_crate;
 use rustc::ich::Fingerprint;
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_mir as mir;
-use rustc::session::{Session, CompileResult, compile_result_from_err_count};
+use rustc::session::{Session, CompileResult};
+use rustc::session::CompileIncomplete;
 use rustc::session::config::{self, Input, OutputFilenames, OutputType,
                              OutputTypes};
 use rustc::session::search_paths::PathKind;
@@ -23,7 +24,7 @@ use rustc::middle::privacy::AccessLevels;
 use rustc::mir::transform::{MIR_CONST, MIR_VALIDATED, MIR_OPTIMIZED, Passes};
 use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
 use rustc::traits;
-use rustc::util::common::time;
+use rustc::util::common::{ErrorReported, time};
 use rustc::util::nodemap::NodeSet;
 use rustc::util::fs::rename_or_copy_remove;
 use rustc_borrowck as borrowck;
@@ -78,7 +79,9 @@ pub fn compile_input(sess: &Session,
            }
 
            if control.$point.stop == Compilation::Stop {
-                return compile_result_from_err_count($tsess.err_count());
+                // FIXME: shouldn't this return Err(CompileIncomplete::Stopped)
+                // if there are no errors?
+                return $tsess.compile_status();
            }
        }}
    }
@@ -91,7 +94,7 @@ pub fn compile_input(sess: &Session,
            Ok(krate) => krate,
            Err(mut parse_error) => {
                parse_error.emit();
-                return Err(1);
+                return Err(CompileIncomplete::Errored(ErrorReported));
            }
        };
 
@@ -194,7 +197,7 @@ pub fn compile_input(sess: &Session,
            (control.after_analysis.callback)(&mut state);
 
            if control.after_analysis.stop == Compilation::Stop {
-                return result.and_then(|_| Err(0usize));
+                return result.and_then(|_| Err(CompileIncomplete::Stopped));
            }
        }
 
@@ -564,7 +567,7 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
                                       addl_plugins: Option<Vec<String>>,
                                       make_glob_map: MakeGlobMap,
                                       after_expand: F)
-                                       -> Result<ExpansionResult, usize>
+                                       -> Result<ExpansionResult, CompileIncomplete>
    where F: FnOnce(&ast::Crate) -> CompileResult,
 {
    let time_passes = sess.time_passes();
@@ -636,7 +639,7 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
    // Lint plugins are registered; now we can process command line flags.
    if sess.opts.describe_lints {
        super::describe_lints(&sess.lint_store.borrow(), true);
-        return Err(0);
+        return Err(CompileIncomplete::Stopped);
    }
    sess.track_errors(|| sess.lint_store.borrow_mut().process_command_line(sess))?;
 
@@ -839,7 +842,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
                                               arenas: &'tcx GlobalArenas<'tcx>,
                                               name: &str,
                                               f: F)
-                                               -> Result<R, usize>
+                                               -> Result<R, CompileIncomplete>
    where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
                            ty::CrateAnalysis,
                            IncrementalHashesMap,
@@ -920,6 +923,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
        // What we need to do constant evaluation.
        passes.push_pass(MIR_CONST, mir::transform::simplify::SimplifyCfg::new("initial"));
        passes.push_pass(MIR_CONST, mir::transform::type_check::TypeckMir);
+        passes.push_pass(MIR_CONST, mir::transform::rustc_peek::SanityCheck);
 
        // What we need to run borrowck etc.
        passes.push_pass(MIR_VALIDATED, mir::transform::qualify_consts::QualifyAndPromoteConstants);
@@ -934,7 +938,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
        // From here on out, regions are gone.
        passes.push_pass(MIR_OPTIMIZED, mir::transform::erase_regions::EraseRegions);
        passes.push_pass(MIR_OPTIMIZED, mir::transform::add_call_guards::AddCallGuards);
-        passes.push_pass(MIR_OPTIMIZED, borrowck::ElaborateDrops);
+        passes.push_pass(MIR_OPTIMIZED, mir::transform::elaborate_drops::ElaborateDrops);
        passes.push_pass(MIR_OPTIMIZED, mir::transform::no_landing_pads::NoLandingPads);
        passes.push_pass(MIR_OPTIMIZED, mir::transform::simplify::SimplifyCfg::new("elaborate-drops"));
 
@@ -1018,7 +1022,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
        // lint warnings and so on -- kindck used to do this abort, but
        // kindck is gone now). -nmatsakis
        if sess.err_count() > 0 {
-            return Ok(f(tcx, analysis, incremental_hashes_map, Err(sess.err_count())));
+            return Ok(f(tcx, analysis, incremental_hashes_map, sess.compile_status()));
        }
 
        analysis.reachable =
@@ -1034,12 +1038,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
 
        time(time_passes, "lint checking", || lint::check_crate(tcx));
 
-        // The above three passes generate errors w/o aborting
-        if sess.err_count() > 0 {
-            return Ok(f(tcx, analysis, incremental_hashes_map, Err(sess.err_count())));
-        }
-
-        Ok(f(tcx, analysis, incremental_hashes_map, Ok(())))
+        return Ok(f(tcx, analysis, incremental_hashes_map, tcx.sess.compile_status()));
    })
 }
 
@@ -1115,11 +1114,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
                 "serialize work products",
                 move || rustc_incremental::save_work_products(sess));
 
-    if sess.err_count() > 0 {
-        Err(sess.err_count())
-    } else {
-        Ok(())
-    }
+    sess.compile_status()
 }
 
 /// Run the linker on any artifacts that resulted from the LLVM run.
Some files were not shown because too many files have changed in this diff.